Best Python code snippet using hypothesis
prune_by_feature_map.py
Source:prune_by_feature_map.py
...55 if del_kernels is not None:56 weight = np.delete(weight, del_kernels, axis=1)57 print(weight.shape)58 return weight, bias, del_filters, origin_channels59 def _prune_rude(self, name, conv_param, del_kernels=None, del_filters=None):60 weight, bias = conv_param61 weight = weight.data62 bias = bias.data63 origin_channels = weight.shape[0]64 if name in [self.base_layer,]: # 以shortcutå±ä¸ºåºç¡æ¥åªæ65 # del_filters = get_del_filter(net, "../models/image/")66 del_filters = np.loadtxt('del_filter.txt', dtype=np.float32)67 kernel_sum = np.sum(np.abs(weight), axis=(1,2,3))68 # print (kernel_sum)69 # del_num = self.ratio70 # kernel_axis = np.argsort(kernel_sum)71 # del_filters = kernel_axis[0:del_num]72 # print(del_filters)73 if del_filters is not None:74 # è£åªééæ°,outputåå°75 weight = np.delete(weight, del_filters, axis=0)76 bias = np.delete(bias, del_filters, axis=0)77 # print('\n')78 # print(name)79 # print(name + " filter nums need to delete is " + str(len(del_filters)))80 # print(name + " filter nums need to preserve is " + str(origin_channels - len(del_filters)))81 if del_kernels is not None:82 # 计ç®åºè¦è£åªçkernel inputåå°83 weight = np.delete(weight, del_kernels, axis=1)84 print("{}å±è£åªåçè¾åºç»´åº¦æ¯{}".format(name,weight.shape))85 return weight, bias, del_filters, origin_channels86 # æ´åè£åª87 def prune_conv_rude(self, name, bottom=None , not_del_filters=False):88 if bottom is None:89 self.conv_data[name] = self._prune_rude(name, self._net.params[name])90 else:91 if not_del_filters is True: # filtersä¸éè¦è£åª(output), ä½æ¯kernelséè¦è£åª(input)92 self.conv_data[name] = self._prune_rude(name, self._net.params[name],93 del_kernels=self.conv_data[self.base_layer][2],94 del_filters=None)95 else: # filterséè¦è£åª(output),ä½æ¯kernelsä¸éè¦è£åª(input)96 self.conv_data[name] = self._prune_rude(name, self._net.params[name],97 del_kernels=None,98 del_filters=self.conv_data[self.base_layer][2],)99 def fc_prune(self,conv_param, del_kernels):100 bias = conv_param[1]101 bias 
= bias.data102 f2c = fc2conv(self._net)103 weight = f2c.del_inputs(del_kernels)104 return weight, bias105 def prune_conv(self, name, bottom=None):106 if bottom is None:107 self.conv_data[name] = self._prune(name, self._net.params[name])108 else:109 self.conv_data[name] = self._prune(name, self._net.params[name], del_kernels=self.conv_data[bottom][2])110 def prune_concat(self, name, bottoms=None):111 if bottoms is not None:112 offsets = [0] + [self.conv_data[b][3] for b in bottoms]113 for i in range(1, len(offsets)):114 offsets[i] += offsets[i - 1]115 del_filters = [self.conv_data[b][2] + offsets[i] for i, b in enumerate(bottoms)]116 del_filters_new = np.concatenate(del_filters)117 else:118 del_filters_new = []119 if name[0:2] == 'fc':120 self.conv_data[name] = self.fc_prune(self._net.params[name], del_filters_new)121 else:122 self.conv_data[name] = self._prune_rude(name, self._net.params[name],123 del_kernels=del_filters_new, del_filters=None)124 def prune_sum(self, name, bottoms):125 del_filters = [self.conv_data[b][2] for b in bottoms]126 del_filter = np.union1d(del_filters[0], del_filters[1])127 print(del_filter)128 weight = []129 bias = []130 origin_channels = self.conv_data[bottoms[0]][3] - len(del_filter)131 for b in bottoms:132 if b[0:3] != 'res':133 self.conv_data[b] = self._prune(b, self._net.params[b], del_filters=del_filter)134 self.conv_data[name] = weight, bias, del_filter, origin_channels135 print("\n {} preserve num : {}".format(name, origin_channels))136 def save(self, new_model, output_weights):137 net2 = caffe.Net(new_model, caffe.TEST)138 for key in net2.params.keys():139 if key in self.conv_data:140 net2.params[key][0].data[...] = self.conv_data[key][0]141 net2.params[key][1].data[...] = self.conv_data[key][1]142 else:143 net2.params[key][0].data[...] = self._net.params[key][0].data144 net2.params[key][1].data[...] 
= self._net.params[key][1].data145 net2.save(output_weights)146root = "../my_model/"147prototxt = root + "TestModel_prune.prototxt"148caffemodel = root + "TestModel_prune.caffemodel"149net = caffe.Net(prototxt, caffemodel, caffe.TEST)150pruner = Prune(net)151# block1,2152# pruner.prune_conv("conv1_1_1")153# pruner.prune_conv("conv1_2_1")154# pruner.prune_conv("conv1_2_2", "conv1_2_1")155# pruner.prune_conv("conv1_3_1")156# pruner.prune_conv("conv1_3_2", "conv1_3_1")157# pruner.prune_conv("conv1_3_3", "conv1_3_2")158#159# pruner.prune_concat("conv2_1", ("conv1_1_1", "conv1_2_2", "conv1_3_3"))160# pruner.prune_conv("conv2_2", "conv2_1")161# pruner.prune_conv("conv2_3", "conv2_2")162# pruner.prune_conv("conv2_4", "conv2_3")163# pruner.prune_conv("conv2_5", "conv2_4")164# pruner.prune_conv("conv2_6", "conv2_5")165# pruner.prune_conv("conv2_7", "conv2_6")166# pruner.prune_conv("conv2_8", "conv2_7")167#168# pruner.prune_concat("conv3_1_1", ("conv2_2", "conv2_4", "conv2_6", "conv2_8"))169# pruner.prune_concat("conv3_1_1b", ("conv2_2", "conv2_4", "conv2_6", "conv2_8"))170# block3 åªæè¿ç¨171pruner.init_layer('conv3_1_1')172pruner.init_layer('conv3_2_1')173pruner.init_layer('conv3_3_1')174pruner.init_layer('conv3_4_1')175pruner.init_layer('conv3_5_1')176pruner.init_layer('conv3_6_1')177pruner.prune_conv_rude('conv3_1_1b')178pruner.prune_conv_rude("conv3_1_2", "conv3_1_1", )179pruner.prune_conv_rude("conv3_2_1", "conv3_1_2", not_del_filters=True)180pruner.prune_conv_rude("conv3_2_2", "conv3_2_1", )181pruner.prune_conv_rude("conv3_3_1", "conv3_2_2", not_del_filters=True)182pruner.prune_conv_rude("conv3_3_2", "conv3_3_1", )183pruner.prune_conv_rude("conv3_4_1", "conv3_3_2", not_del_filters=True)184pruner.prune_conv_rude("conv3_4_2", "conv3_4_1", )185pruner.prune_conv_rude("conv3_5_1", "conv3_4_2", not_del_filters=True)186pruner.prune_conv_rude("conv3_5_2", "conv3_5_1", )187pruner.prune_conv_rude("conv3_6_1", "conv3_5_2", 
not_del_filters=True)188pruner.prune_conv_rude("conv3_6_2", "conv3_6_1", )189pruner.prune_concat("conv4_1_1", ("conv3_2_2", "conv3_4_2", "conv3_6_2", ))190pruner.prune_concat("conv4_1_1b", ("conv3_2_2", "conv3_4_2", "conv3_6_2",))191# # block4 åªæè¿ç¨192#193# pruner.init_layer('conv4_1_1')194# pruner.init_layer('conv4_2_1')195# pruner.init_layer('conv4_3_1')196# pruner.init_layer('conv4_4_1')197# pruner.init_layer('conv4_5_1')198# pruner.init_layer('conv4_6_1')199#200# pruner.prune_conv_rude('conv4_1_1b')201# pruner.prune_conv_rude("conv4_1_2", "conv4_1_1", )202# pruner.prune_conv_rude("conv4_2_1", "conv4_1_2", not_del_filters=True)203# pruner.prune_conv_rude("conv4_2_2", "conv4_2_1", )204# pruner.prune_conv_rude("conv4_3_1", "conv4_2_2", not_del_filters=True)205# pruner.prune_conv_rude("conv4_3_2", "conv4_3_1", )206# pruner.prune_conv_rude("conv4_4_1", "conv4_3_2", not_del_filters=True)207# pruner.prune_conv_rude("conv4_4_2", "conv4_4_1", )208# pruner.prune_conv_rude("conv4_5_1", "conv4_4_2", not_del_filters=True)209# pruner.prune_conv_rude("conv4_5_2", "conv4_5_1", )210# pruner.prune_conv_rude("conv4_6_1", "conv4_5_2", not_del_filters=True)211# pruner.prune_conv_rude("conv4_6_2", "conv4_6_1", )212#213# pruner.prune_concat("conv5_1_1", ("conv4_2_2", "conv4_4_2", "conv4_6_2", ))214# pruner.prune_concat("conv5_1_1b", ("conv4_2_2", "conv4_4_2", "conv4_6_2",))215#216# # block5 åªæè¿ç¨217# pruner.init_layer('conv5_1_1')218# pruner.init_layer('conv5_2_1')219# pruner.init_layer('conv5_3_1')220# pruner.init_layer('conv5_4_1')221# pruner.init_layer('conv5_5_1')222# pruner.init_layer('conv5_6_1')223#224# pruner.prune_conv_rude('conv5_1_1b')225# pruner.prune_conv_rude("conv5_1_2", "conv5_1_1", )226# pruner.prune_conv_rude("conv5_2_1", "conv5_1_2", not_del_filters=True)227# pruner.prune_conv_rude("conv5_2_2", "conv5_2_1", )228# pruner.prune_conv_rude("conv5_3_1", "conv5_2_2", not_del_filters=True)229# pruner.prune_conv_rude("conv5_3_2", "conv5_3_1", )230# 
pruner.prune_conv_rude("conv5_4_1", "conv5_3_2", not_del_filters=True)231# pruner.prune_conv_rude("conv5_4_2", "conv5_4_1", )232# pruner.prune_conv_rude("conv5_5_1", "conv5_4_2", not_del_filters=True)233# pruner.prune_conv_rude("conv5_5_2", "conv5_5_1", )234# pruner.prune_conv_rude("conv5_6_1", "conv5_5_2", not_del_filters=True)235# pruner.prune_conv_rude("conv5_6_2", "conv5_6_1", )236# pruner.prune_concat('fc_svd_v', ('conv5_2_2', 'conv5_4_2', 'conv5_6_2'))237pro_new = root + "TestModel_prune_1.prototxt"...
NLP3.py
Source:NLP3.py
# -*- coding: utf-8 -*-
"""Beijing subway route finder.

Crawls line/station data from bjsubway.com (cached on disk as
``beijingsubway.txt``), builds transfer-station lookup tables, and searches
for a route between two stations.
"""
import ast
import os
import re
import sys
from collections import Counter

import requests
import requests.packages.urllib3.util.ssl_

# The site's TLS setup needs a relaxed cipher list for the crawl to succeed.
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'ALL'

# Module-level shared state, filled in by get_global_data().
dic_rude = {}       # {'1号线': ['苹果园', '公主坟', ...]} - every station on each line
dic_rude_ts = {}    # {'1号线': ['公主坟', '军事博物馆', ...]} - transfer stations on each line
dic_ts_rude = {}    # {'军事博物馆': ['1号线', ...]} - lines meeting at each transfer station
dic_ts = {}         # {'军事博物馆': [...]} - stations directly reachable from a transfer station
dic_ts_link = {}    # {'军事博物馆': ['公主坟', ...]} - transfer stations directly reachable
# Loop lines get circular-distance handling in count_station().
# NOTE(review): Beijing's loop lines are 2号线 and 10号线 - '1号线' here looks
# suspicious; confirm against the crawled line names before changing it.
dic_rude_cycle = ['1号线', '10号线']
all_station = []    # every station name, de-duplicated


def get_alldata():
    """Scrape bjsubway.com timetable pages into ``{line_name: [station, ...]}``."""
    url = r"https://www.bjsubway.com/e/action/ListInfo/?classid=39&ph=1"
    print('begin get data')
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.1) Gecko/20100101 Firefox/40.1'}
    # verify=False: best-effort crawl of a site whose certificate chain does
    # not validate; the data fetched is public timetable information.
    text = requests.get(url, headers=header, timeout=6, verify=False).text
    # Station cells (<th>...</th>) and line-name headers ("xx线 ... 首").
    tokens = re.findall(r'> \w+</th>|>\w+</th>|\w+线.*首', text)
    firstflage = True
    dic = {}
    x = None
    y = []
    for i in tokens:
        # Skip timetable header cells (time / direction / first / last train...).
        if re.findall(r'时间|往|首车|末|全程|终点', i):
            continue
        if re.findall(r'\w+线.*首', i):
            # A new line header: flush the stations collected for the previous line.
            if firstflage:
                firstflage = False
            else:
                dic[x] = y
            x = re.findall(r'\w+线.*首', i)[0][:-1]
            y = []
            continue
        if re.findall(r'> \w+</th>|>\w+</th>', i):
            temp = re.findall(r'> \w+<|>\w+<', i)[0][1:-1]
            y.append(temp.strip())
    # BUG FIX: the final line's stations were never stored, because a flush
    # only happened when the *next* line header appeared.
    if not firstflage:
        dic[x] = y
    # De-duplicate stations while preserving their original order on the line.
    for i in dic:
        stations = dic[i]
        deduped = list(set(stations))
        deduped.sort(key=stations.index)
        dic[i] = deduped
    return dic


def get_subway_data():
    """Return the line->stations dict, caching it on disk after the first crawl."""
    if "beijingsubway.txt" not in os.listdir():
        dic = get_alldata()
        with open("beijingsubway.txt", 'w+') as fw:
            fw.write(str(dic))  # persist the dict as its literal repr
    else:
        with open("beijingsubway.txt", 'r+') as fr:
            # BUG FIX: ast.literal_eval instead of eval - the cache only ever
            # holds a dict literal, and eval would execute arbitrary code.
            dic = ast.literal_eval(fr.read())
    return dic


def get_global_data(dic_rude):
    """Populate the module-level lookup tables from the line->stations dict."""
    global dic_rude_ts, dic_ts, dic_ts_link, all_station, dic_rude_cycle
    all_station1 = []
    for i in dic_rude:
        all_station1 += dic_rude[i]
    all_station = list(set(all_station1))
    # A station appearing on more than one line is a transfer station.
    all_station1 = Counter(all_station1).most_common()
    all_ts = [i for i, j in all_station1 if j > 1]
    # dic_rude_ts: the transfer stations present on each line.
    for i in dic_rude:
        temp = []
        for j in all_ts:
            if j in dic_rude[i]:
                temp.append(j)
        dic_rude_ts[i] = temp
    # dic_ts_rude / dic_ts / dic_ts_link for every transfer station.
    for i in all_ts:
        temptsrude = []
        ts = []
        link = []
        for j in dic_rude:
            if i in dic_rude[j]:
                temptsrude.append(j)
                ts += dic_rude[j]
                link += dic_rude_ts[j]
        dic_ts_rude[i] = temptsrude
        temp = list(set(ts))
        temp.remove(i)       # a station does not "reach" itself
        dic_ts[i] = temp
        temp = list(set(link))
        temp.remove(i)
        dic_ts_link[i] = temp


def count_station(start, des):
    """Return ``[start, n_stops, des, line]`` for the best single-line ride.

    ``n_stops`` is -1 when no single line contains both stations.
    """
    result = [start, -1, des, '']
    for i in dic_rude:
        if start in dic_rude[i] and des in dic_rude[i]:
            tempcount = abs(dic_rude[i].index(start) - dic_rude[i].index(des))
            if i in dic_rude_cycle:
                # On a loop line, riding the other way around may be shorter.
                tempcount = min(tempcount, len(dic_rude[i]) - tempcount)
            if result[1] < 0 or tempcount < result[1]:
                result[1] = tempcount
                result[-1] = i
    return result


def count_all_rude_station(rude):
    """Total number of stops along a route given as a list of waypoints."""
    if len(rude) == 1:
        return 0
    if len(rude) < 1:
        return -1
    return count_station(rude[0], rude[1])[1] + count_all_rude_station(rude[1:])


def say_all_rude_station(rude):
    """Describe a waypoint route leg by leg, one line of text per leg."""
    if len(rude) == 1:
        return ''
    if len(rude) < 1:
        return '-1'
    res = count_station(rude[0], rude[1])
    return "从 {} 出发坐 {} 经过 {} 站到 {} 下车\n".format(
        res[0], res[-1], res[1], res[2]) + say_all_rude_station(rude[1:])


def searchpath(start, des, stragegy):
    """Search for a route from ``start`` to ``des``.

    ``stragegy`` == 'shortts' prefers fewest transfers; anything else prefers
    fewest stops.  Prints the chosen route and returns 0, or returns the
    direct-ride result / an error string for the trivial cases.
    """
    if start not in all_station:
        return '初始站点不存在'
    if des not in all_station:
        return '终点不存在'
    # Direct ride on a single line?
    result = count_station(start, des)
    if result[1] > 0:
        return result
    path = []
    pathfinish = []
    # Layered search: a station already reached in an earlier layer need not
    # be expanded again from a longer path.
    besearch = {}
    if start not in dic_ts:
        # Seed with the transfer stations of every line through start.
        for i in dic_rude:
            if start in dic_rude[i]:
                temp = [[start, j] for j in dic_rude_ts[i]]
                path.append(temp)
    else:
        path = [[[start]]]
    while path[0]:
        Temp = []
        temppath = path.pop()
        while temppath:
            temppathone = temppath.pop()
            laststation = temppathone[-1]
            if laststation in besearch and count_all_rude_station(temppathone) > besearch[laststation]:
                continue
            if des in dic_ts[laststation]:
                pathfinish.append(temppathone + [des])
                continue
            else:
                besearch[laststation] = count_all_rude_station(temppathone)
                for i in dic_ts_link[laststation]:
                    Temp.append(temppathone + [i])
        path.append(Temp)
    if not pathfinish:
        # BUG FIX: guard against IndexError below when no route was found;
        # the original's trailing "return 0  # no path found" intent.
        return 0
    if stragegy == 'shortts':
        # Fewest transfers: keep only the shortest waypoint lists, then order
        # those by total stops.
        pathfinish = [i for i in pathfinish if len(i) == len(pathfinish[0])]
        # BUG FIX: the original called sorted() and discarded the result.
        pathfinish.sort(key=count_all_rude_station)
    else:
        pathfinish.sort(key=count_all_rude_station)
    print(say_all_rude_station(pathfinish[0]))
    return 0


if __name__ == "__main__":
    flagep = False  # flip on for debug dumps of the lookup tables
    dic_rude = get_subway_data()
    get_global_data(dic_rude)

    if flagep: print(dic_rude)
    if flagep: print(dic_rude_ts)
    if flagep: print(dic_ts)
    if flagep: print(dic_ts_rude)
    if flagep: print(dic_ts_link)
    if flagep: print(all_station)
...
D.py
Source:D.py
1#!/usr/bin/env pypy32import math3n,d,m = input().split()4n = int(n)5d = int(d)6m = int(m)7A = list(map(int, input().split()))8rude = []9polite = []10for a in A:11 if a > m:12 rude += [a]13 else:14 polite += [a]15rude = sorted(rude)[::-1]16polite = sorted(polite)[::-1]17rude_prefix = [0]18for r in rude:19 rude_prefix += [rude_prefix[-1] + r]20for _ in range(n):21 rude_prefix += [rude_prefix[-1]]22polite_prefix = [0]23for p in polite:24 polite_prefix += [polite_prefix[-1] + p]25for _ in range(n):26 polite_prefix += [polite_prefix[-1]]27ans = float("-inf")28for np in range(len(polite)+1):29 polite_score = polite_prefix[np]30 rude_cells = n - np31 num_rude = math.ceil(rude_cells / (d+1))32 rude_score = rude_prefix[num_rude]33 ans = max(ans, polite_score + rude_score)...
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.
Get 100 automation testing minutes FREE!