Best JavaScript code snippet using storybook-root
convert_main.py
Source:convert_main.py
1import sys, os, zipfile,re, requests,shutil,json,glob2sys.path.append(os.path.realpath(os.curdir+"/hello_app/"))3sys.path.append(os.path.realpath(os.curdir+"/hnclic/"))4sys.path.append(os.path.realpath(os.curdir+"/data_adapter/"))5from bs4 import BeautifulSoup6import lxml7import lxml.html8import threading9import numpy as np10import pandas as pd11import pandasql,excel2img12from numpy import nan as NaN13from functools import reduce14import jinja2,datetime,chardet,time,traceback,locale,copy15from pptx import Presentation16from pptx.enum.shapes import MSO_SHAPE_TYPE#MSO_SHAPE_TYPE.EMBEDDED_OLE_OBJECT17import comtypes.client18from openpyxl.formula.translate import Translator19from openpyxl import load_workbook20import asyncio21import aiohttp22import yaml23from handle_file import convert_file_for_xlsx,convert_file_for_txt,convert_file_for_pptx,convert_html24from utils import get_jinja2_Environment,is_number,guess_col_names,exec_template,repeat_rearrange25import data_adapter26import glb27'''28âå¨Windowséï¼time.strftime使ç¨Cè¿è¡æ¶çå¤åèå符串å½æ°strftimeï¼29è¿ä¸ªå½æ°å¿
é¡»å
æ ¹æ®å½ålocaleé
ç½®æ¥ç¼ç æ ¼å¼åå符串ï¼ä½¿ç¨PyUnicode_EncodeLocaleï¼ãâ30å¦æä¸è®¾ç½®å¥½localeçè¯ï¼æ ¹æ®é»è®¤ç"C" localeï¼åºå±çwcstombså½æ°ä¼ä½¿ç¨latin-1ç¼ç ï¼ååèç¼ç ï¼31æ¥ç¼ç æ ¼å¼åå符串ï¼ç¶å导è´é¢ä¸»æä¾çå¤åèç¼ç çå符串å¨ç¼ç æ¶åºéã32'''33locale.setlocale(locale.LC_CTYPE, 'chinese')34#æ¾ç¤ºææå35pd.set_option('display.max_columns', None)36#æ¾ç¤ºææè¡37pd.set_option('display.max_rows', None)38pd.set_option('display.width', None)39pd.options.display.float_format = '{:.2f}'.format40def func_time(func):41 def inner(*args,**kw):42 start_time = time.time()43 ret=func(*args,**kw)44 end_time = time.time()45 print(str(func)+'å½æ°è¿è¡æ¶é´ä¸ºï¼'+str(end_time-start_time)+'s')46 return ret47 return inner48def pd_read_csv(url):49 with open(url, 'rb') as f:50 data = f.read()51 f_charInfo=chardet.detect(data) 52 try:53 return pd.read_csv(url,encoding=f_charInfo['encoding'])54 except:55 return pd.read_csv(url,encoding='gb2312')56 #return pd.read_csv(url,encoding=f_charInfo['encoding'])57def load_from_file(url,d_p):58 ret=dict()59 if(os.path.isfile(url)):60 if url.find(".xlsx")>0:61 data=pd.read_excel(url)62 data[data.columns[0]]=data[data.columns[0]].astype(str)63 else:64 with open(url, 'rb') as f:65 data = f.read()66 f_charInfo=chardet.detect(data) 67 data=pd_read_csv(url)68 data[data.columns[0]]=data[data.columns[0]].astype(str)69 if (len(d_p)==0):70 d_p.append({"t": "html","start": "1","end": "10000","columns": "auto","view_columns": "","sort": "","name": "ä¿®æ¹è¿é",'data_is_json':True})71 for p in d_p:72 p['data_is_json']=True73 ret[p['name']]={'data':data,'p':p,'header':data.columns.to_list(),"data_from":{'ds':d_p}}74 return ret75 raise Exception(url +'ä¸åå¨')76async def load_from_url2(data_from=None,config_data=None,upload_path=None,userid=None,user_input_form_data=None):77 '''78 data_from æç½åä¸çåæ°é»è¾æ»æ±79 user_input_form_data å®é
ä¼ è¿æ¥çåæ°80 d_p_1={'t':'json','name':'b','pattern':'#reportDivaaa1thetable tr','start':'reportDivaaa1_data={"rows":','end':"/*-end-*/"}81 d_p_2={'t':'html','name':'a','pattern':'#reportDivmainthetable tr','start':4,'end':18}82 é对t:html çend,+4 表示éè¦4è¡,ä¸å¸¦+å·ç4 ï¼è¡¨ç¤ºç¬¬4è¡ï¼è´æ°è¡¨ç¤ºä»åé¢å¼å§åæ°å è¡ï¼å¦æ为空ï¼è¡¨ç¤ºå
¨é¨ï¼æå¤100ä¸è¡83 '''84 data_from['exec_stat']="1:å¼å§æ§è¡"85 adapter=data_adapter.get(data_from,userid)86 await adapter.load_data_from_url(config_data.get('form_input',{}),user_input_form_data)87 # if soup is not None and isinstance(soup,BeautifulSoup):88 # soup.decompose()89 # soup=None90 data_from['exec_stat']="9:æå"91 print("æå")92 ret=dict()93 for p in data_from['ds']:94 p['exec_stat']="1:å¼å§è£åª"95 resultModel,header,data,json_props=adapter.load_data_for_p(p)96 if resultModel=="TableModel":97 data=pd.DataFrame(data,columns=header)98 elif resultModel=="JsonModel":99 data=pd.DataFrame(data)[json_props] #æjson_propsä¸æå®ç顺åºéæ100 data.columns=header#æheaderé设åå101 if 'id' in json_props:102 data=data.drop(['id'], axis=1)103 elif resultModel=="DataFrame":104 pass105 else:106 raise RuntimeError("éé
æ¥å£åªè½è¿åTableModelæè
JsonModelã请è系管çåä¿®æ¹ç¨åº")107 # ååå»é108 data.columns=repeat_rearrange(list(data.columns))109 header=repeat_rearrange(header)110 #å é¤NULLå111 if adapter.dropNaNColumn:112 data=data.replace('',NaN).dropna(axis = 1, how = "all")113 header=list(data.columns)114 #缺çåå为ï¼s+æ°å115 if isinstance(p['view_columns'],str):116 data.columns=header if p['columns']=='' or p['columns'].startswith("auto") else [('s'+str(x) if is_number(x) else x) for x in range(len(data.loc[0]))]117 view_columns=str.strip(p['view_columns'])118 if view_columns!="" :119 view_set=set(view_columns.split(',') )120 data_set=set(data.columns)121 if len(view_set- data_set )>0:122 raise RuntimeError(f"{p['name']}以ä¸åå·²ç»è¢«å é¤ï¼{str(view_set- data_set)}ãæ°å¢çå:{str(data_set - set(p['old_columns']))} " )123 data=data[[ (data.columns[int(x)] if x.isdigit() else x) for x in view_columns.split(',')]]124 if isinstance(p['view_columns'],list):125 if len(p['view_columns'])!=0 :126 data=data[p['view_columns']]127 key_column=p.get('key_column')128 if key_column is None:129 for key in data.columns :130 if str(data[key].dtype)=='object' and len(data[key].unique())==len(data):131 key_column=key132 break133 if key_column is None:134 key_column=data.columns[0]135 if key_column not in data.columns:136 raise Exception(f"{p['name']}çå
³é®å{key_column} ä¸å¨å¯è§å表ä¸ï¼é常æ¯åå§æ¥è¡¨çåå被修æ¹äºï¼ä½ å¯ä»¥å¨ãæ¥çååãçå°æ¹ï¼å°ä¸»é®é为keyï¼æ¸
空ãæç»æ°æ®æ´çãä¸çä¿¡æ¯ï¼éæ°æ§è¡å°±å¯ä»¥äºã")137 p['key_column']=key_column138 data=data[(data[key_column]!='') & (data[key_column].isnull()==False)].reset_index(drop=True)139 data=data.replace('','None')#å
å é¤ä¸»é®ä¸ºç©ºçæ
åµï¼ç¶åå
¶ä»ä¸ºå符串空æ¿æ¢ä¸ºNone140 if p.get("backup",'').strip()!='':141 rptid=os.path.realpath(upload_path).split("\\")[-1]142 bak_file=os.path.realpath(os.path.join(upload_path+"../../../è¿å¾æ°æ®/", f"{rptid}_{p['name']}"))143 if os.path.exists(f"{bak_file}_ä¸æ¬¡.json"):144 os.remove(f"{bak_file}_ä¸æ¬¡.json")145 if os.path.exists(f"{bak_file}_ä¸æ¬¡_new.json"):146 os.rename(f"{bak_file}_ä¸æ¬¡_new.json",f"{bak_file}_ä¸æ¬¡.json")147 cur_bak_list=sorted(glob.glob(f"{bak_file}*"),key=os.path.getmtime)148 cur_bak_list=[x for x in cur_bak_list if x!=f"{bak_file}_ä¸æ¬¡_new.json"]149 if len(cur_bak_list)!=0:#åªè®°å½å¤©çå¢é150 if (datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(cur_bak_list[-1]) )).days ==0:151 shutil.copyfile( cur_bak_list[-1] , f"{bak_file}_ä¸æ¬¡_new.json")152 for one_file in cur_bak_list: #å é¤ è¶
è¿2天çåå²æ°æ®153 if (datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(one_file) )).days >2:154 os.remove(one_file)155 #if datetime.date.today()==one['from'][2:8]:#å¤ä»½å½åæ°æ®156 with open(f"{bak_file}_{datetime.date.today().isoformat()}.json", 'w') as f:157 f.write(data.to_json(orient='records',force_ascii=False))158 #data.to_csv(f"{bak_file}.csv",index=False)159 p['old_columns']=header160 p['exec_stat']="2:è£åªæå"161 ret[p['name']]={'data':data,'p':p,'header':header,'form_input':data_from['form_input'],"data_from":data_from} 162 adapter=None163 return ret164def appendData_and_execLastSql(one_ds,ret,upload_path):165 k=one_ds['name']166 v=ret[k]167 one_ds['exec_stat']="2:å¼å§å并åæ§è¡æç»sql"168 key_column=one_ds.get('key_column')169 if key_column is None:170 for key in v['data'].columns :171 if str(v['data'][key].dtype)=='object' and len(v['data'][key].unique())==len(v['data']):172 key_column=key173 break174 if key_column is None and v['data'].empty==False:175 key_column=v['data'].columns[0]176 one_ds['key_column']=key_column177 178 #t_append=v['p'].get('append')179 #if t_append is not None and isinstance( t_append,dict):180 # v['p']['append']=[t_append,]181 for one in one_ds.get('append',list()):182 one_ds['exec_stat']="3:å¼å§å并"+one['from']183 if one.get('from','')=='':184 continue185 elif one['from'].find(".xlsx")>0:186 data=pd.read_excel(os.path.join(upload_path, one['from']))187 data[data.columns[0]]=data[data.columns[0]].astype(str)188 right_key_column=data.columns[0]189 elif one['from'].find(".csv")>0:190 data=pd_read_csv(os.path.join(upload_path, one['from']))191 right_key_column=data.columns[0]192 elif one['from'][0:2] in ['ä¸æ¬¡','å¤ä»½']:#å¤ä»½22æ¶05å193 other=one['from'].split(":")194 backup_name=other[1] if len(other)>1 else k195 rptid=os.path.realpath(upload_path).split("\\")[-1]196 qushu_date=datetime.date.today()+datetime.timedelta(days=-1)197 if one['from'][0:2]=='ä¸æ¬¡':198 bak_file=os.path.realpath(os.path.join(upload_path+"../../../è¿å¾æ°æ®/", f"{rptid}_{backup_name}_ä¸æ¬¡"))199 else:200 bak_file=os.path.realpath(os.path.join(upload_path+"../../../è¿å¾æ°æ®/", f"{rptid}_{backup_name}_{qushu_date.isoformat()}"))201 if os.path.exists(f"{bak_file}.json"):202 with open(f"{bak_file}.json", 'r') as f:203 data = f.read()204 data=pd.read_json(data)205 elif not os.path.exists(f"{bak_file}.csv"):206 data=pd.DataFrame(columns=ret[backup_name]['data'].columns)207 else:208 data=pd_read_csv(f"{bak_file}.csv")209 if data.empty:210 data=pd.DataFrame(columns=ret[backup_name]['data'].columns)211 right_key_column=ret[backup_name]['p']['key_column']212 elif ret.get(one['from']):213 data=ret[one['from']]['data']214 right_key_column=ret[one['from']]['p']['key_column']215 else:216 continue217 if right_key_column not in data.columns:218 right_key_column=data.columns[0]219 if v['data'].empty:220 v['data']=data221 key_column=right_key_column222 continue 223 data=data[(data[right_key_column]!='') & (data[right_key_column].isnull()==False)].reset_index(drop=True)224 data[right_key_column]=data[right_key_column].astype(str)225 if len(data[right_key_column].unique())!=len(data):226 raise Exception(f"æ°æ®éã{v['p']['name']}ã çå并æ°æ®éã{one['from']}ãç[{right_key_column}]åæ°æ®ä¸å¯ä¸ï¼")227 v['data']=v['data'].merge(data,how ="left", left_on=key_column, right_on=right_key_column,suffixes=('', f"_{one['from']}")).fillna(0)228 229 230 one_ds['exec_stat']="4:å并æåï¼å¼å§æ°æ®è½¬æ¢"231 data=v['data']232 one_ds['after_append_columns']=list(data.columns)233 if True:# 
v['p'].get('data_is_json',False)==False:234 start_number=False235 for x in data.columns:#å°½å¯è½çå°å
³é®ååä¹åçæ°æ®è®¾ç½®ä¸ºfloatç±»å236 if x==one_ds['key_column']:237 start_number=True238 continue239 if start_number==False:240 continue241 if data[x].dtype.name=='object':242 try:243 data[x]=data[x].astype(int)244 except:245 try:246 data[x]=data[x].astype(float)247 except:248 pass249 pass250 one_ds['exec_stat']="5:å¼å§æ§è¡æç»sql"251 sql=one_ds.get('sql','').strip()252 if sql!="" :253 exec_sql=exec_template(None,sql,[])254 data=pandasql.sqldf(exec_sql,dict({key:value['data'] for key,value in ret.items()}))255 if(one_ds.get('vis_sql_conf') is not None and one_ds['vis_sql_conf'].get('expr','').strip()!=''):256 data=eval(k+one_ds['vis_sql_conf']['expr'],{k:data})257 v['data']=data.round(2)258 one_ds['last_columns']=data.columns.values.tolist() 259 one_ds['exec_stat']="9:å®æsqlæ§è¡"260@func_time261async def load_all_data(config_data,id,appendFunDict=None,upload_path=None,userid=None,user_input_form_data=None):262 print(threading.currentThread().name)263 config_data_reset_exec_stat(config_data)264 config_data['exec_stat']='1:å¼å§è½½å
¥æ°æ®'265 ret={}266 start_time = time.time()267 cur_time=time.strftime("%Hæ¶%Må")268 #åhtmlåcsvä¸çæ°æ®269 tasks=[]270 #loop = asyncio.get_event_loop()271 async def _inner_task(ret):272 for one in config_data['data_from']:273 if one['type']=='sql' or one['url'].startswith('ç»æ://'):274 continue 275 elif one['type']=='file':276 filename=os.path.join(upload_path, one['url'])277 ret={**ret,**load_from_file(filename,one['ds'])}278 continue279 #elif one['type'] in ['json','html']:280 tasks.append(load_from_url2(one,config_data,upload_path,userid,user_input_form_data=user_input_form_data))281 return await asyncio.gather(*tasks,return_exceptions=True),ret282 #status_list = loop.run_until_complete(asyncio.gather(*tasks))283 #https://yanbin.blog/how-flask-work-with-asyncio/#more-10368 å
³äºflaskä¸çå¼æ¥ï¼è¿é讲çæ¯è¾è¯¦ç»284 #status_list,ret=asyncio.run(_inner_task(ret)) 285 status_list,ret=await _inner_task(ret)286 287 for t in status_list:288 if isinstance(t,dict):289 ret={**ret,**t} 290 if isinstance(t,Exception):291 raise t292 print( f"å
¨é¨åæ°ç»æï¼ç¨æ¶ï¼ {time.time()-start_time}")293 config_data['exec_stat']='1:å¼å§è®¡ç®å并æ°æ®åæ§è¡æç»sql'294 #ç¨å·²å®ä¹çå
¨å±åæ°ï¼è¦çææååæ°çåæ°åé295 form_input={}296 for one in config_data['data_from']:297 form_input={**form_input,** {x['name']:x['value'] for x in one['form_input']} }298 form_input={**form_input,** {x['name']:x['value'] for x in config_data['form_input']} } 299 config_data['form_input']=[{'name':k,'value':v} for (k,v) in form_input.items()]300 config_data['exec_stat']='2:å¼å§è®¡ç®åç¬sqlçç»æé'301 #追å æ°æ®å°htmlæ°æ®ä¸ï¼ä¸è¬æ¯csvæå¤ä»½æå
¶ä»æ°æ®é302 if config_data.get('ds_queue') is None:303 config_data['ds_queue']=[]304 for one_data_from in config_data['data_from']:305 if one_data_from['url'].startswith('ç»æ://') or one_data_from['type']=='sql':306 continue307 for one_ds in one_data_from['ds']:308 config_data['ds_queue'].append(one_ds['name'])309 for one_data_from in config_data['data_from']:310 if one_data_from['type']=='sql':311 for one_ds in one_data_from['ds']:312 config_data['ds_queue'].append(one_ds['name'])313 for one_data_from in config_data['data_from']:314 if one_data_from['url'].startswith('ç»æ://'):315 for one_ds in one_data_from['ds']:316 config_data['ds_queue'].append(one_ds['name'])317 for ds_name in config_data['ds_queue']:318 for one_data_from in config_data['data_from']:319 if one_data_from['url'].startswith('ç»æ://'):320 continue321 for one_ds in one_data_from['ds']:322 if one_ds['name']==ds_name:323 if one_data_from['type']=='sql':324 ret[one_ds['name']]={'data':pd.DataFrame(),'p':one_ds}325 appendData_and_execLastSql(one_ds,ret,upload_path)326 print(f"appendData_and_execLastSqlï¼ç¨æ¶ï¼ {time.time()-start_time}")327 328 config_data['exec_stat']='4:æåº'329 for k,v in ret.items():330 data=v['data']331 sort_name=str.strip(v['p'].get('sort',''))332 if sort_name!="" :333 if isinstance(sort_name,int) or is_number(str(sort_name)):334 sort_name=data.columns[int(sort_name)]335 data=data.sort_values(sort_name,ascending= False).reset_index(drop=True)336 else:337 data[[ sort_name]]=data[[sort_name]].astype(float)338 data=data.sort_values(by=sort_name,ascending= False).reset_index(drop=True)339 v['data']=data340 print(f"æåºåï¼ç¨æ¶ï¼ {time.time()-start_time}")341 ds_dict={k:v['data'] for k,v in ret.items()}342 config_data['exec_stat']='5:计ç®åé'343 if(config_data.get("vars") is not None):#å
计ç®ææä¸æ¯ä¾èµexcelç»æçåéï¼è¿æ ·å¨excelä¸å°±ä¹å¯ä»¥ä½¿ç¨åéäº344 for one_var in config_data["vars"]: 345 if(ds_dict.get(one_var["name"]) is not None):346 raise SyntaxError(f'åéåå<{one_var["name"]}>已被使ç¨ï¼')347 # 表示çå°±æ¯å½ååéå¼ç¨çä¸æ¯excelç»æã348 if ds_dict.get(one_var["ds"]) is None:349 continue350 one_var['exec_stat']='1:å¼å§è®¡ç®åé'351 try:352 exec(one_var["name"]+"="+one_var["last_statement"],ds_dict)353 val= ds_dict[one_var["name"]]354 if isinstance(val , float):355 val=round(val,2)356 if float(val)-int(val)==0:357 val=int(val)358 ds_dict[one_var["name"]] =val 359 except SyntaxError as e:360 raise SyntaxError(f'åé<{one_var["name"]}>å®ä¹è¯æ³é误ï¼'+str(e))361 except Exception as e:362 raise SyntaxError(f'åé<{one_var["name"]}>å®ä¹è¯æ³é误ï¼'+str(e))363 one_var['exec_stat']='9:计ç®åéæå'364 365 print(f"åéåï¼ç¨æ¶ï¼ {time.time()-start_time}")366 ret_files=[]367 result=''368 config_data['exec_stat']='6:æ模æ¿çæç»æ'369 out_file=f"{upload_path}/../../tmp/{id}/"370 if os.path.exists(out_file):371 shutil.rmtree(out_file)372 os.makedirs(out_file)373 print(f"rmåï¼ç¨æ¶ï¼ {time.time()-start_time}")374 # 为äºè½ç´æ¥å¼ç¨æ¨¡æ¿ç»æï¼å
æ模æ¿çæç»æ375 for one_part in config_data.get('template_output_act',[]):376 if one_part["canOutput"]==False or one_part["canOutput"]=="false":377 continue378 one_file=one_part["file"]379 one_part['exec_stat']='1:å¼å§æ模æ¿çæ'380 template_file=f"{upload_path}/{one_file}" 381 if not os.path.exists(template_file):382 ret_files.append({'name':one_file,'errcode':'1','message' :'æ æ¤æ件ï¼è¯·è¯¦ç»æ£æ¥æ件å','url':''})383 continue384 out_file=f"{upload_path}/../../tmp/{id}/{one_file}"385 if(one_file[-4:]=='pptx' ): 386 loopForDS=one_part.get("loopForDS",'').strip()387 if loopForDS=='':388 convert_file_for_pptx(out_file,template_file,ds_dict)389 else:390 t_ds_dict=ds_dict.copy()391 for idx,row in ds_dict[loopForDS ].iterrows():392 t_ds_dict["_loop_"]=row393 t_ds_dict["_idx_"]=idx 394 convert_file_for_pptx(out_file,template_file,t_ds_dict)395 elif(one_file[-4:]=='xlsx' ):396 convert_file_for_xlsx(out_file,template_file,ds_dict, appendFunDict=appendFunDict)397 ret_files.append({'name':one_file,'errcode':'0','message' :'æåçæ','url':f'/mg/file/download_t/{id}/{one_file}'})398 one_part['exec_stat']='9:æ模æ¿çææå'399 print(f"模æ¿åï¼ç¨æ¶ï¼ {time.time()-start_time}")400 config_data['exec_stat']='7:ä»excel模æ¿ç»æä¸åæ°'401 for data_from in config_data['data_from']:402 if not data_from['url'].startswith('ç»æ://'):403 continue404 data_from['exec_stat']="1:ä»excel模æ¿ç»æä¸åæ°å¼å§"405 get_excel_data(data_from,id,upload_path,ds_dict,ret)406 config_data['exec_stat']='9:æå'407 return ds_dict408def get_excel_data(data_from,id,upload_path,ds_dict={},ret={}):409 ds_needInit=True if data_from['ds'] is None or len(data_from['ds'])==0 else False410 excel_result_file=f"{upload_path}/../../tmp/{id}/{data_from['url'][len('ç»æ://'):]}"411 if os.path.exists(excel_result_file)==False:412 raise RuntimeError("ã"+data_from['url'][len('ç»æ://'):]+"ãä¸åå¨ï¼å¦æåå¨è¯¥excel模æ¿ï¼è¯·å
è¿è¡æ¥çæ°æ®ï¼ç¶ååæ·»å å
¶ä½ä¸ºæ°æ®æºï¼")413 wb = load_workbook(excel_result_file,data_only=True)414 try:415 if len(wb.defined_names.definedName)==0:416 return417 if ds_needInit:# 没ædså®ä¹ï¼éè¦åå§ådsï¼è¿éè¦å¤ææ¯ä¸æ¯è½ä¸è½ä½ä¸ºdsï¼todo418 data_from['ds']=[{"t": "json","pattern": wb.defined_names.definedName[0].name,"end": '',419 "start": '',"columns": "auto","view_columns": "","sort": "","name": "ä¿®æ¹è¿é",420 "old_columns":[]}]421 for one_ds in data_from['ds']:422 has_define=False423 for my_range in wb.defined_names.definedName:424 if my_range.name != one_ds['pattern']:425 continue426 one_ds['exec_stat']='1:å¼å§åæ°'427 has_define=True428 for title, coord in my_range.destinations: # returns a generator of (worksheet title, cell range) tuples429 ws = wb[title]430 cell_ranges=ws[coord]431 col_nums=cell_ranges[-1][-1].column -cell_ranges[0][0].column +1 432 row_nums=cell_ranges[-1][-1].row -cell_ranges[0][0].row +1433 excel_results= [[None] * col_nums for i in range(row_nums)]434 for row in cell_ranges:435 for cell in row:436 if cell.value is None:437 continue438 for one_merged_cell in ws.merged_cells:439 if cell.row == one_merged_cell.min_row and cell.column ==one_merged_cell.min_col :440 for i_row in range(one_merged_cell.max_row-one_merged_cell.min_row +1):441 for i_col in range(one_merged_cell.max_col-one_merged_cell.min_col+1 ):442 excel_results[i_row + cell.row - cell_ranges[0][0].row][i_col+cell.column -cell_ranges[0][0].column]=cell.value443 continue444 excel_results[cell.row - cell_ranges[0][0].row][cell.column -cell_ranges[0][0].column]=cell.value445 header,end_line=guess_col_names(excel_results,"auto")446 data=pd.DataFrame(excel_results[end_line:],columns=header)447 ret[one_ds['name']]={'data':data,'header':header,'p':one_ds}448 449 one_ds['last_columns']=header450 one_ds['old_columns']=header451 appendData_and_execLastSql(one_ds,ret,upload_path)452 ds_dict[one_ds['name']]=ret[one_ds['name']]['data'] # æ·»å å°åé表ä¸453 break454 one_ds['exec_stat']='9:æå'455 if has_define==False:456 raise Exception(data_from['url'] +'ï¼ä¸åå¨å称ï¼'+one_ds['pattern'])457 data_from['exec_stat']="9:ä»excel模æ¿ç»æä¸åæ°æå" 458 return ret 459 finally:460 wb.close()461 wb=None462async def files_template_exec(id,config_data,userid,app_save_path,appendFunDict=None,wx_queue=None):463 '''464 çæ模æ¿æ件465 '''466 if wx_queue is None:467 wx_queue=glb.msg_queue468 upload_path=f"{app_save_path}\\{userid}\\{id}"469 config_data_reset_exec_stat(config_data)470 ds_dict=await load_all_data(config_data,id,appendFunDict,upload_path=upload_path,userid=userid)471 #ds_dict={**{k:v['data'] for k,v in ret_dataset.items()},**ds_dict}472 config_data['exec_stat']='7:计ç®excel模æ¿ç»æä¸çåé'473 if(config_data.get("vars") is not None):474 for one_var in config_data["vars"]:475 # åªè®¡ç®æ²¡æ计ç®è¿çåé476 if(ds_dict.get(one_var["name"]) is not None):477 continue478 one_var['exec_stat']='1:å¼å§è®¡ç®åé'479 try:480 exec(one_var["name"] +"="+one_var["last_statement"],ds_dict)481 val= ds_dict[one_var["name"]]482 if isinstance(val , float):483 val=round(val,2)484 if float(val)-int(val)==0:485 val=int(val)486 ds_dict[one_var["name"]]=val487 488 except SyntaxError as e:489 raise SyntaxError(f'åé<{one_var["name"]}>å®ä¹è¯æ³é误ï¼'+e.text)490 except Exception as e:491 raise SyntaxError(f'åé<{one_var["name"]}>å®ä¹è¯æ³é误ï¼'+str(e))492 one_var['exec_stat']='9:计ç®åéæå'493 config_data['exec_stat']='8:åéç»æ'494 ret_files=[]495 result=''496 for one_part in config_data.get('template_output_act',[]):497 if one_part["canOutput"]==False or one_part["canOutput"]=="false":498 continue499 
one_file=one_part["file"]500 template_file=f"{upload_path}/{one_file}" 501 if not os.path.exists(template_file):502 ret_files.append({'name':one_file,'errcode':'1','message' :'æ æ¤æ件ï¼è¯·è¯¦ç»æ£æ¥æ件å','url':''})503 continue504 out_file=f"{upload_path}/../../tmp/{id}/{one_file}"505 if(one_file[-4:]=='pptx' ):506 out_file=out_file.replace("\\","/") 507 for wx_user in one_part["wx_msg"].strip().split(",") :508 for one_img in glob.glob(out_file[:-5]+"_*/*.JPG"):509 one_img=one_img.replace("\\","/")510 wx_queue.put({'type':'sendImage',"wxid":wx_user,"content":one_img})511 for wx_user in one_part["wx_file"].strip().split(","):512 wx_queue.put({'type':'sendFile',"wxid":wx_user,"content":out_file}) 513 elif(one_file[-4:]=='xlsx' ):514 out_file=out_file.replace("\\","/")515 for wx_user in one_part["wx_msg"].strip().split(","):516 for one_img in glob.glob(out_file+"*.png"):517 one_img=one_img.replace("\\","/")518 wx_queue.put({'type':'sendImage',"wxid":wx_user,"content":one_img})519 for wx_user in one_part["wx_file"].strip().split(","):520 wx_queue.put({'type':'sendFile',"wxid":wx_user,"content":out_file})521 elif(one_file[-3:]=='txt' ):522 message=convert_file_for_txt(out_file,template_file,ds_dict)523 out_file=out_file.replace("\\","/")524 for wx_user in one_part["wx_file"].strip().split(","):525 wx_queue.put({'type':'sendFile',"wxid":wx_user,"content":out_file})526 #ç¥å¥çä½ç¨ï¼emojiå¯ä»¥åéå°å¾®ä¿¡ä¸äº527 #message=json.dumps(message)[1:-1].encode().decode('unicode_escape') 528 for wx_user in one_part["wx_msg"].strip().split(","):529 wx_queue.put({'type':'sendMessage',"wxid":wx_user,"content":message})530 ret_files.append({'name':one_file,'errcode':'0','message' :'æåçæ','url':f'/mg/file/download_t/{id}/{one_file}'})531 tpl_results=[]532 def loop_one_txt(one_part,t_ds_dict,idx=0):533 expr_html=lxml.html.fromstring(one_part['txt']).text_content()534 if expr_html.startswith("http"):535 last_append=datetime.datetime.now().strftime("%#d%#H%#M%S")536 txt_tpl=f"http://hnapp.e-chinalife.com/weixin2/RedirctHandler2.aspx/637A7394-C8FE-4A8B-9D3A-7E7ADA492CE4/a{id}_{last_append}_{idx}.html"537 convert_html(f"{upload_path}/../../tmp/html/a{id}_{last_append}_{idx}.html",expr_html.getText(),t_ds_dict)538 else:539 txt_tpl=exec_template(None,expr_html,t_ds_dict) 540 tpl_results.append({'name': one_part['name'],"result":txt_tpl.replace('\n','\n<br>'),541 "img": 'https://gw.alipayobjects.com/zos/rmsportal/WdGqmHpayyMjiEhcKoVE.png'}542 )543 message=txt_tpl#json.dumps(txt_tpl)[1:-1].encode().decode('unicode_escape')544 for wx_user in one_part.get("wx_msg",'').strip().split(","):545 if wx_user.strip()!='':546 wx_queue.put({'type':'sendMessage',"wxid":wx_user,"content":message})547 548 for one_part in config_data.get('text_tpls',[]):549 t_ds_dict=ds_dict.copy()550 one_part['exec_stat']='1:å¼å§æææ¬æ¨¡æ¿çæ'551 loopForDS=one_part.get("loopForDS",'').strip()552 if loopForDS=='':553 loop_one_txt(one_part,t_ds_dict)554 else:555 for idx,row in ds_dict[loopForDS ].iterrows():556 t_ds_dict["_loop_"]=row557 t_ds_dict["_idx_"]=idx+1558 loop_one_txt(one_part,t_ds_dict,idx)559 one_part['exec_stat']='9:æææ¬æ¨¡æ¿çææå'560 out_files=f"{upload_path}/../../tmp/{id}/"561 all_files=[]562 if os.path.exists(out_files):563 #out_files=os.listdir(out_files)564 for maindir, subdir, file_name_list in os.walk(out_files):565 for filename in file_name_list:566 apath = os.path.join(maindir, filename)[len(out_files):]#å并æä¸ä¸ªå®æ´è·¯å¾567 all_files.append(apath)568 569 570 config_data['exec_stat']='9:æå'571 return 
ret_files,tpl_results,all_files,config_data # ,ds_dict572def config_data_reset_exec_stat(config_data):573 for data_from in config_data.get('data_from',[]):574 data_from['exec_stat']='0:æªå¼å§'575 for ds in data_from.get('ds',[]):576 ds['exec_stat']='0:æªå¼å§'577 for one in config_data.get('vars',[]):578 one['exec_stat']='0:æªå¼å§'579 for one in config_data.get('template_output_act',[]):580 one['exec_stat']='0:æªå¼å§'581 for one in config_data.get('text_tpls',[]):582 one['exec_stat']='0:æªå¼å§'583if __name__ == '__main__':584 import glb585 import objgraph 586 import gc,tracemalloc587 tracemalloc.start()588 b_snapshot = tracemalloc.take_snapshot()589 for i in range(10):590 with glb.db_connect() as conn:591 with conn.cursor(as_dict=True) as cursor:592 cursor.execute("SELECT * FROM zhanbao_tbl WHERE id=4274 order by id asc")593 row = cursor.fetchone()594 while row:595 b1_snapshot = tracemalloc.take_snapshot()596 try:597 print('worker_no:'+ row['worker_no']+"\t"+ str(row['id']) +" "+ str(tracemalloc.get_traced_memory()))598 files_template_exec(row['id'],json.loads(row['config_txt']),row['worker_no'],glb.config['UPLOAD_FOLDER'] ,wx_queue=glb.msg_queue) 599 print("====================================")600 print('worker_no:'+ row['worker_no']+"\t"+ str(row['id']) +" "+ str(tracemalloc.get_traced_memory()))601 print("====================================")602 snapshot2 = tracemalloc.take_snapshot()603 top_stats = snapshot2.compare_to(b1_snapshot, 'lineno')604 for stat in top_stats[:10]:605 print(stat)606 print("====================================")607 except Exception as e:608 print(e)609 row = cursor.fetchone()610 gc.collect() 611 objgraph.show_most_common_types(limit=5) 612 ### æå°åºå¯¹è±¡æ°ç®æå¤ç 50 个类åä¿¡æ¯ 613 gc.collect() ...
test_utils.py
Source:test_utils.py
1# -*- coding: utf-8 -*-2from datetime import datetime3import sys4from unittest import mock5import pytest6from awxkit import utils7from awxkit import exceptions as exc8@pytest.mark.parametrize('inp, out',9 [[True, True],10 [False, False],11 [1, True],12 [0, False],13 [1.0, True],14 [0.0, False],15 ['TrUe', True],16 ['FalSe', False],17 ['yEs', True],18 ['No', False],19 ['oN', True],20 ['oFf', False],21 ['asdf', True],22 ['0', False],23 ['', False],24 [{1: 1}, True],25 [{}, False],26 [(0,), True],27 [(), False],28 [[1], True],29 [[], False]])30def test_to_bool(inp, out):31 assert utils.to_bool(inp) == out32@pytest.mark.parametrize('inp, out',33 [["{}", {}],34 ["{'null': null}", {"null": None}],35 ["{'bool': true}", {"bool": True}],36 ["{'bool': false}", {"bool": False}],37 ["{'int': 0}", {"int": 0}],38 ["{'float': 1.0}", {"float": 1.0}],39 ["{'str': 'abc'}", {"str": "abc"}],40 ["{'obj': {}}", {"obj": {}}],41 ["{'list': []}", {"list": []}],42 ["---", None],43 ["---\n'null': null", {'null': None}],44 ["---\n'bool': true", {'bool': True}],45 ["---\n'bool': false", {'bool': False}],46 ["---\n'int': 0", {'int': 0}],47 ["---\n'float': 1.0", {'float': 1.0}],48 ["---\n'string': 'abc'", {'string': 'abc'}],49 ["---\n'obj': {}", {'obj': {}}],50 ["---\n'list': []", {'list': []}],51 ["", None],52 ["'null': null", {'null': None}],53 ["'bool': true", {'bool': True}],54 ["'bool': false", {'bool': False}],55 ["'int': 0", {'int': 0}],56 ["'float': 1.0", {'float': 1.0}],57 ["'string': 'abc'", {'string': 'abc'}],58 ["'obj': {}", {'obj': {}}],59 ["'list': []", {'list': []}]])60def test_load_valid_json_or_yaml(inp, out):61 assert utils.load_json_or_yaml(inp) == out62@pytest.mark.parametrize('inp', [True, False, 0, 1.0, {}, [], None])63def test_load_invalid_json_or_yaml(inp):64 with pytest.raises(TypeError):65 utils.load_json_or_yaml(inp)66@pytest.mark.parametrize('non_ascii', [True, False])67@pytest.mark.skipif(68 sys.version_info < (3, 6),69 reason='this is only intended to be used in py3, not the CLI'70)71def test_random_titles_are_unicode(non_ascii):72 assert isinstance(utils.random_title(non_ascii=non_ascii), str)73@pytest.mark.parametrize('non_ascii', [True, False])74@pytest.mark.skipif(75 sys.version_info < (3, 6),76 reason='this is only intended to be used in py3, not the CLI'77)78def test_random_titles_generates_correct_characters(non_ascii):79 title = utils.random_title(non_ascii=non_ascii)80 if non_ascii:81 with pytest.raises(UnicodeEncodeError):82 title.encode('ascii')83 title.encode('utf-8')84 else:85 title.encode('ascii')86 title.encode('utf-8')87@pytest.mark.parametrize('inp, out',88 [['ClassNameShouldChange', 'class_name_should_change'],89 ['classnameshouldntchange', 'classnameshouldntchange'],90 ['Classspacingshouldntchange', 'classspacingshouldntchange'],91 ['Class1Name2Should3Change', 'class_1_name_2_should_3_change'],92 ['Class123name234should345change456', 'class_123_name_234_should_345_change_456']])93def test_class_name_to_kw_arg(inp, out):94 assert utils.class_name_to_kw_arg(inp) == out95@pytest.mark.parametrize('first, second, expected',96 [['/api/v2/resources/', '/api/v2/resources/', True],97 ['/api/v2/resources/', '/api/v2/resources/?test=ignored', True],98 ['/api/v2/resources/?one=ignored', '/api/v2/resources/?two=ignored', True],99 ['http://one.com', 'http://one.com', True],100 ['http://one.com', 'http://www.one.com', True],101 ['http://one.com', 'http://one.com?test=ignored', True],102 ['http://one.com', 'http://www.one.com?test=ignored', True],103 ['http://one.com', 
'https://one.com', False],104 ['http://one.com', 'https://one.com?test=ignored', False]])105def test_are_same_endpoint(first, second, expected):106 assert utils.are_same_endpoint(first, second) == expected107@pytest.mark.parametrize('endpoint, expected',108 [['/api/v2/resources/', 'v2'],109 ['/api/v2000/resources/', 'v2000'],110 ['/api/', 'common']])111def test_version_from_endpoint(endpoint, expected):112 assert utils.version_from_endpoint(endpoint) == expected113class OneClass:114 pass115class TwoClass:116 pass117class ThreeClass:118 pass119class FourClass(ThreeClass):120 pass121def test_filter_by_class_with_subclass_class():122 filtered = utils.filter_by_class((OneClass, OneClass), (FourClass, ThreeClass))123 assert filtered == [OneClass, FourClass]124def test_filter_by_class_with_subclass_instance():125 one = OneClass()126 four = FourClass()127 filtered = utils.filter_by_class((one, OneClass), (four, ThreeClass))128 assert filtered == [one, four]129def test_filter_by_class_no_arg_tuples():130 three = ThreeClass()131 filtered = utils.filter_by_class((True, OneClass), (False, TwoClass), (three, ThreeClass))132 assert filtered == [OneClass, None, three]133def test_filter_by_class_with_arg_tuples_containing_class():134 one = OneClass()135 three = (ThreeClass, dict(one=1, two=2))136 filtered = utils.filter_by_class((one, OneClass), (False, TwoClass), (three, ThreeClass))137 assert filtered == [one, None, three]138def test_filter_by_class_with_arg_tuples_containing_subclass():139 one = OneClass()140 three = (FourClass, dict(one=1, two=2))141 filtered = utils.filter_by_class((one, OneClass), (False, TwoClass), (three, ThreeClass))142 assert filtered == [one, None, three]143@pytest.mark.parametrize('truthy', (True, 123, 'yes'))144def test_filter_by_class_with_arg_tuples_containing_truthy(truthy):145 one = OneClass()146 three = (truthy, dict(one=1, two=2))147 filtered = utils.filter_by_class((one, OneClass), (False, TwoClass), (three, ThreeClass))148 assert filtered == [one, None, (ThreeClass, dict(one=1, two=2))]149@pytest.mark.parametrize('date_string,now,expected', [150 ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 2, 750000), 1.25),151 ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 1, 500000), 0.00),152 ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 0, 500000), -1.00),153])154def test_seconds_since_date_string(date_string, now, expected):155 with mock.patch('awxkit.utils.utcnow', return_value=now):156 assert utils.seconds_since_date_string(date_string) == expected157class RecordingCallback(object):158 def __init__(self, value=True):159 self.call_count = 0160 self.value = value161 def __call__(self):162 self.call_count += 1163 return self.value164def test_suppress():165 callback = RecordingCallback()166 with utils.suppress(ZeroDivisionError, IndexError):167 raise ZeroDivisionError168 callback()169 raise IndexError170 raise KeyError171 assert callback.call_count == 0172 with utils.suppress(ZeroDivisionError, IndexError):173 raise IndexError174 callback()175 raise ZeroDivisionError176 raise KeyError177 assert callback.call_count == 0178 with pytest.raises(KeyError):179 with utils.suppress(ZeroDivisionError, IndexError):180 raise KeyError181 callback()182 raise ZeroDivisionError183 raise IndexError184 assert callback.call_count == 0185class TestPollUntil(object):186 @pytest.mark.parametrize('timeout', [0, 0.0, -0.5, -1, -9999999])187 def test_callback_called_once_for_non_positive_timeout(self, timeout):188 with mock.patch('awxkit.utils.logged_sleep') as sleep:189 
callback = RecordingCallback()190 utils.poll_until(callback, timeout=timeout)191 assert not sleep.called192 assert callback.call_count == 1193 def test_exc_raised_on_timeout(self):194 with mock.patch('awxkit.utils.logged_sleep'):195 with pytest.raises(exc.WaitUntilTimeout):196 utils.poll_until(lambda: False, timeout=0)197 @pytest.mark.parametrize('callback_value', [{'hello': 1}, 'foo', True])198 def test_non_falsey_callback_value_is_returned(self, callback_value):199 with mock.patch('awxkit.utils.logged_sleep'):200 assert utils.poll_until(lambda: callback_value) == callback_value201class TestPseudoNamespace(object):202 def test_set_item_check_item(self):203 pn = utils.PseudoNamespace()204 pn['key'] = 'value'205 assert pn['key'] == 'value'206 def test_set_item_check_attr(self):207 pn = utils.PseudoNamespace()208 pn['key'] = 'value'209 assert pn.key == 'value'210 def test_set_attr_check_item(self):211 pn = utils.PseudoNamespace()212 pn.key = 'value'213 assert pn['key'] == 'value'214 def test_set_attr_check_attr(self):215 pn = utils.PseudoNamespace()216 pn.key = 'value'217 assert pn.key == 'value'218 def test_auto_dicts_cast(self):219 pn = utils.PseudoNamespace()220 pn.one = dict()221 pn.one.two = dict(three=3)222 assert pn.one.two.three == 3223 assert pn == dict(one=dict(two=dict(three=3)))224 def test_auto_list_of_dicts_cast(self):225 pn = utils.PseudoNamespace()226 pn.one = [dict(two=2), dict(three=3)]227 assert pn.one[0].two == 2228 assert pn == dict(one=[dict(two=2), dict(three=3)])229 def test_auto_tuple_of_dicts_cast(self):230 pn = utils.PseudoNamespace()231 pn.one = (dict(two=2), dict(three=3))232 assert pn.one[0].two == 2233 assert pn == dict(one=(dict(two=2), dict(three=3)))234 def test_instantiation_via_dict(self):235 pn = utils.PseudoNamespace(dict(one=1, two=2, three=3))236 assert pn.one == 1237 assert pn == dict(one=1, two=2, three=3)238 assert len(pn.keys()) == 3239 def test_instantiation_via_kwargs(self):240 pn = utils.PseudoNamespace(one=1, two=2, three=3)241 assert pn.one == 1242 assert pn == dict(one=1, two=2, three=3)243 assert len(pn.keys()) == 3244 def test_instantiation_via_dict_and_kwargs(self):245 pn = utils.PseudoNamespace(dict(one=1, two=2, three=3), four=4, five=5)246 assert pn.one == 1247 assert pn.four == 4248 assert pn == dict(one=1, two=2, three=3, four=4, five=5)249 assert len(pn.keys()) == 5250 def test_instantiation_via_nested_dict(self):251 pn = utils.PseudoNamespace(dict(one=1, two=2), three=dict(four=4, five=dict(six=6)))252 assert pn.one == 1253 assert pn.three.four == 4254 assert pn.three.five.six == 6255 assert pn == dict(one=1, two=2, three=dict(four=4, five=dict(six=6)))256 def test_instantiation_via_nested_dict_with_list(self):257 pn = utils.PseudoNamespace(dict(one=[dict(two=2), dict(three=3)]))258 assert pn.one[0].two == 2259 assert pn.one[1].three == 3260 assert pn == dict(one=[dict(two=2), dict(three=3)])261 def test_instantiation_via_nested_dict_with_lists(self):262 pn = utils.PseudoNamespace(dict(one=[dict(two=2),263 dict(three=dict(four=4,264 five=[dict(six=6),265 dict(seven=7)]))]))266 assert pn.one[1].three.five[1].seven == 7267 def test_instantiation_via_nested_dict_with_tuple(self):268 pn = utils.PseudoNamespace(dict(one=(dict(two=2), dict(three=3))))269 assert pn.one[0].two == 2270 assert pn.one[1].three == 3271 assert pn == dict(one=(dict(two=2), dict(three=3)))272 def test_instantiation_via_nested_dict_with_tuples(self):273 pn = utils.PseudoNamespace(dict(one=(dict(two=2),274 dict(three=dict(four=4,275 five=(dict(six=6),276 
dict(seven=7)))))))277 assert pn.one[1].three.five[1].seven == 7278 def test_update_with_nested_dict(self):279 pn = utils.PseudoNamespace()280 pn.update(dict(one=1, two=2, three=3), four=4, five=5)281 assert pn.one == 1282 assert pn.four == 4283 assert pn == dict(one=1, two=2, three=3, four=4, five=5)284 assert len(pn.keys()) == 5285 def test_update_with_nested_dict_with_lists(self):286 pn = utils.PseudoNamespace()287 pn.update(dict(one=[dict(two=2),288 dict(three=dict(four=4,289 five=[dict(six=6),290 dict(seven=7)]))]))291 assert pn.one[1].three.five[1].seven == 7292 def test_update_with_nested_dict_with_tuples(self):293 pn = utils.PseudoNamespace()294 pn.update(dict(one=(dict(two=2),295 dict(three=dict(four=4,296 five=(dict(six=6),297 dict(seven=7)))))))298 assert pn.one[1].three.five[1].seven == 7299class TestUpdatePayload(object):300 def test_empty_payload(self):301 fields = ('one', 'two', 'three', 'four')302 kwargs = dict(two=2, four=4)303 payload = {}304 utils.update_payload(payload, fields, kwargs)305 assert payload == kwargs306 def test_untouched_payload(self):307 fields = ('not', 'in', 'kwargs')308 kwargs = dict(one=1, two=2)309 payload = dict(three=3, four=4)310 utils.update_payload(payload, fields, kwargs)311 assert payload == dict(three=3, four=4)312 def test_overwritten_payload(self):313 fields = ('one', 'two')314 kwargs = dict(one=1, two=2)315 payload = dict(one='one', two='two')316 utils.update_payload(payload, fields, kwargs)317 assert payload == kwargs318 def test_falsy_kwargs(self):319 fields = ('one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight')320 kwargs = dict(one=False, two=(), three='', four=None, five=0, six={}, seven=set(), eight=[])321 payload = {}322 utils.update_payload(payload, fields, kwargs)323 assert payload == kwargs324 def test_not_provided_strips_payload(self):325 fields = ('one', 'two')326 kwargs = dict(one=utils.not_provided)327 payload = dict(one=1, two=2)328 utils.update_payload(payload, fields, kwargs)329 assert payload == dict(two=2)330def test_to_ical():331 now = datetime.utcnow()332 ical_datetime = utils.to_ical(now)333 date = str(now.date()).replace('-', '')334 time = str(now.time()).split('.')[0].replace(':', '')...
test_registry.py
Source:test_registry.py
import pytest
from awxkit.api.registry import URLRegistry

class One(object):
    pass

class Two(object):
    pass

@pytest.fixture
def reg():
    return URLRegistry()

def test_url_pattern(reg):
    desired = r'^/some/resources/\d+/(\?.*)*$'
    assert reg.url_pattern(r'/some/resources/\d+/').pattern == desired

def test_methodless_get_from_empty_registry(reg):
    assert reg.get('nonexistent') is None

def test_method_get_from_empty_registry(reg):
    assert reg.get('nonexistent', 'method') is None

def test_methodless_setdefault_methodless_get(reg):
...
animationLogic.js
Source:animationLogic.js
// Returns the row of animations computed from the row it was given
export let shiftRowGetAnimationOnePhase = (row) => {
    let thisAnimeRow = {
        one: 0, two: 0, three: 0, four: 0
    }
    let variant = {
        ax000: ((row.one !== 0) && (row.two === 0) && (row.four === 0) && (row.three === 0)),
        a0000: ((row.one === 0) && (row.two === 0) && (row.four === 0) && (row.three === 0)),
        a000x: ((row.one === 0) && (row.two === 0) && (row.four !== 0) && (row.three === 0)),
        a0x00: ((row.two !== 0) && (row.one === 0) && (row.three === 0) && (row.four === 0)),
        axx00: ((row.one !== 0) && (row.two !== 0) && (row.four === 0) && (row.three === 0)),
        ax00x: ((row.one !== 0) && (row.four !== 0) && (row.two === 0) && (row.three === 0)),
        a00x0: ((row.three !== 0) && (row.one === 0) && (row.two === 0) && (row.four === 0)),
        ax0x0: ((row.one !== 0) && (row.two === 0) && (row.three !== 0) && (row.four === 0)),
        axxx0: ((row.one !== 0) && (row.two !== 0) && (row.three !== 0) && (row.four === 0)),
        axx0x: ((row.one !== 0) && (row.two !== 0) && (row.four !== 0) && (row.three === 0)),
        ax0xx: ((row.one !== 0) && (row.three !== 0) && (row.four !== 0) && (row.two === 0)),
        a00xx: ((row.one === 0) && (row.two === 0) && (row.three !== 0) && (row.four !== 0)),
        a0x0x: ((row.one === 0) && (row.two !== 0) && (row.three === 0) && (row.four !== 0)),
        a0xx0: ((row.one === 0) && (row.two !== 0) && (row.three !== 0) && (row.four === 0)),
        a0xxx: ((row.one === 0) && (row.two !== 0) && (row.three !== 0) && (row.four !== 0)),
        axxxx: ((row.one !== 0) && (row.two !== 0) && (row.three !== 0) && (row.four !== 0))
    }
    let correspondenceObject = {
        a0000: {one: 0, two: 0, three: 0, four: 0},
        ax000: {one: 3, two: 0, three: 0, four: 0},
        a000x: {one: 0, two: 0, three: 0, four: 0},
        a0x00: {one: 0, two: 2, three: 0, four: 0},
        axx00: (row.one === row.two) ? {one: 3, two: 2, three: 0, four: 0} : {one: 2, two: 2, three: 0, four: 0},
        ax00x: (row.one === row.four) ? {one: 3, two: 0, three: 0, four: 0} : {one: 2, two: 0, three: 0, four: 0},
        a00x0: {one: 0, two: 0, three: 1, four: 0},
        ax0x0: (row.one === row.three) ? {one: 3, two: 0, three: 1, four: 0} : {one: 2, two: 0, three: 1, four: 0},
        axxx0: (row.two === row.three) ? {one: 2, two: 2, three: 1, four: 0} : (row.one === row.two) ? {one: 2, two: 1, three: 1, four: 0} : {one: 1, two: 1, three: 1, four: 0},
        axx0x: (row.four === row.two) ? {one: 2, two: 2, three: 0, four: 0} : (row.one === row.two) ? {one: 2, two: 1, three: 0, four: 0} : {one: 1, two: 1, three: 0, four: 0},
        ax0xx: (row.three === row.four) ? {one: 2, two: 0, three: 1, four: 0} : (row.one === row.three) ? {one: 2, two: 0, three: 0, four: 0} : {one: 1, two: 0, three: 0, four: 0},
        a00xx: (row.three === row.four) ? {one: 0, two: 0, three: 1, four: 0} : {one: 0, two: 0, three: 0, four: 0},
        a0x0x: (row.two === row.four) ? {one: 0, two: 2, three: 0, four: 0} : {one: 0, two: 1, three: 0, four: 0},
        a0xx0: (row.two === row.three) ? {one: 0, two: 2, three: 1, four: 0} : {one: 0, two: 1, three: 1, four: 0},
        a0xxx: (row.three === row.four) ? {one: 0, two: 1, three: 1, four: 0} : (row.two === row.three) ? {one: 0, two: 1, three: 0, four: 0} : {one: 0, two: 0, three: 0, four: 0},
        axxxx: ((row.one === row.two) && (row.three === row.four)) ? {one: 2, two: 1, three: 1, four: 0} : (row.three === row.four) ? {one: 1, two: 1, three: 1, four: 0}
            : (row.three === row.two) ? {one: 1, two: 1, three: 0, four: 0} : (row.two === row.one) ? {one: 1, two: 0, three: 0, four: 0} : {one: 0, two: 0, three: 0, four: 0}
    }

    for (let variantKey in variant) {
        if (variant[variantKey]) {
            thisAnimeRow = correspondenceObject[variantKey]
            return thisAnimeRow
        }
    }

    return thisAnimeRow
}

// for result state usability
// input row: the OLD one !!!
export let shiftRowGetAnimationTwoPhase = (row) => {

    let thisAnimeRow = {
        one: 0, two: 0, three: 0, four: 0
    }
    let variant = {
        ax000: ((row.one !== 0) && (row.two === 0) && (row.four === 0) && (row.three === 0)),
        a0000: ((row.one === 0) && (row.two === 0) && (row.four === 0) && (row.three === 0)),
        a000x: ((row.one === 0) && (row.two === 0) && (row.four !== 0) && (row.three === 0)),
        a0x00: ((row.two !== 0) && (row.one === 0) && (row.three === 0) && (row.four === 0)),
        axx00: ((row.one !== 0) && (row.two !== 0) && (row.four === 0) && (row.three === 0)),
        ax00x: ((row.one !== 0) && (row.four !== 0) && (row.two === 0) && (row.three === 0)),
        a00x0: ((row.three !== 0) && (row.one === 0) && (row.two === 0) && (row.four === 0)),
        ax0x0: ((row.one !== 0) && (row.two === 0) && (row.three !== 0) && (row.four === 0)),
        axxx0: ((row.one !== 0) && (row.two !== 0) && (row.three !== 0) && (row.four === 0)),
        axx0x: ((row.one !== 0) && (row.two !== 0) && (row.four !== 0) && (row.three === 0)),
        ax0xx: ((row.one !== 0) && (row.three !== 0) && (row.four !== 0) && (row.two === 0)),
        a00xx: ((row.one === 0) && (row.two === 0) && (row.three !== 0) && (row.four !== 0)),
        a0x0x: ((row.one === 0) && (row.two !== 0) && (row.three === 0) && (row.four !== 0)),
        a0xx0: ((row.one === 0) && (row.two !== 0) && (row.three !== 0) && (row.four === 0)),
        a0xxx: ((row.one === 0) && (row.two !== 0) && (row.three !== 0) && (row.four !== 0)),
        axxxx: ((row.one !== 0) && (row.two !== 0) && (row.three !== 0) && (row.four !== 0))
    }

    let correspondenceObject = {
        a0000: {one: 0, two: 0, three: 0, four: 0},
        ax000: {one: 0, two: 0, three: 0, four: 0},
        a000x: {one: 0, two: 0, three: 0, four: 0},
        a0x00: {one: 0, two: 0, three: 0, four: 0},
        axx00: (row.one === row.two) ? {one: 0, two: 0, three: 0, four: 4} : {one: 0, two: 0, three: 0, four: 0},
        ax00x: (row.one === row.four) ? {one: 0, two: 0, three: 0, four: 4} : {one: 0, two: 0, three: 0, four: 0},
        a00x0: {one: 0, two: 0, three: 0, four: 0},
        ax0x0: (row.one === row.three) ? {one: 0, two: 0, three: 0, four: 4} : {one: 0, two: 0, three: 0, four: 0},
        axxx0: (row.two === row.three) ? {one: 0, two: 0, three: 0, four: 4} : (row.one === row.two) ? {one: 0, two: 0, three: 4, four: 0} : {one: 0, two: 0, three: 0, four: 0},
        axx0x: (row.four === row.two) ? {one: 0, two: 0, three: 0, four: 4} : (row.one === row.two) ? {one: 0, two: 0, three: 4, four: 0} : {one: 0, two: 0, three: 0, four: 0},
        ax0xx: (row.three === row.four) ? {one: 0, two: 0, three: 0, four: 4} : (row.one === row.three) ? {one: 0, two: 0, three: 4, four: 0} : {one: 0, two: 0, three: 0, four: 0},
        a00xx: (row.three === row.four) ? {one: 0, two: 0, three: 0, four: 4} : {one: 0, two: 0, three: 0, four: 0},
        a0x0x: (row.two === row.four) ? {one: 0, two: 0, three: 0, four: 4} : {one: 0, two: 0, three: 0, four: 0},
        a0xx0: (row.two === row.three) ? {one: 0, two: 0, three: 0, four: 4} : {one: 0, two: 0, three: 0, four: 0},
        a0xxx: (row.three === row.four) ? {one: 0, two: 0, three: 0, four: 4} : (row.two === row.three) ? {one: 0, two: 0, three: 4, four: 0} : {one: 0, two: 0, three: 0, four: 0},
        axxxx: ((row.one === row.two) && (row.three === row.four)) ? {one: 0, two: 0, three: 4, four: 4} : (row.three === row.four) ? {one: 0, two: 0, three: 0, four: 4}
            : (row.three === row.two) ? {one: 0, two: 0, three: 4, four: 0} : (row.two === row.one) ? {one: 0, two: 4, three: 0, four: 0} : {one: 0, two: 0, three: 0, four: 0}
    }

    for (let variantKey in variant) {
        if (variant[variantKey]) {
            thisAnimeRow = correspondenceObject[variantKey]
            return thisAnimeRow
        }
    }

    return thisAnimeRow
}
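The two exported helpers above map a 4-cell row (one..four) to per-cell animation codes, keyed by which cells are occupied and which neighbours are equal. A minimal usage sketch, assuming the file is imported as './animationLogic' (the sample row values are invented for illustration):
// Hypothetical usage of the helpers above; the row values are made up.
import { shiftRowGetAnimationOnePhase, shiftRowGetAnimationTwoPhase } from './animationLogic';

const row = { one: 2, two: 2, three: 0, four: 0 };   // matches the "axx00" variant

// Phase one: slide animation codes; row.one === row.two, so {one: 3, two: 2, three: 0, four: 0}
const phaseOne = shiftRowGetAnimationOnePhase(row);

// Phase two: merge animation codes for the same row, here {one: 0, two: 0, three: 0, four: 4}
const phaseTwo = shiftRowGetAnimationTwoPhase(row);

console.log(phaseOne, phaseTwo);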
...
angular-locale_en-xa.js
Source:angular-locale_en-xa.js
1'use strict';2angular.module("ngLocale", [], ["$provide", function($provide) {3var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};4function getDecimals(n) {5 n = n + '';6 var i = n.indexOf('.');7 return (i == -1) ? 0 : n.length - i - 1;8}9function getVF(n, opt_precision) {10 var v = opt_precision;11 if (undefined === v) {12 v = Math.min(getDecimals(n), 3);13 }14 var base = Math.pow(10, v);15 var f = ((n * base) | 0) % base;16 return {v: v, f: f};17}18$provide.value("$locale", {19 "DATETIME_FORMATS": {20 "AMPMS": [21 "[\u00c5\u1e40 one]",22 "[\u00de\u1e40 one]"23 ],24 "DAY": [25 "[\u0160\u00fb\u00f1\u00f0\u00e5\u00fd one]",26 "[\u1e40\u00f6\u00f1\u00f0\u00e5\u00fd one]",27 "[\u0162\u00fb\u00e9\u0161\u00f0\u00e5\u00fd one]",28 "[\u0174\u00e9\u00f0\u00f1\u00e9\u0161\u00f0\u00e5\u00fd one two]",29 "[\u0162\u0125\u00fb\u0155\u0161\u00f0\u00e5\u00fd one]",30 "[\u0191\u0155\u00ee\u00f0\u00e5\u00fd one]",31 "[\u0160\u00e5\u0163\u00fb\u0155\u00f0\u00e5\u00fd one]"32 ],33 "ERANAMES": [34 "[\u0181\u00e9\u0192\u00f6\u0155\u00e9\u2003\u00c7\u0125\u0155\u00ee\u0161\u0163 one two]",35 "[\u00c5\u00f1\u00f1\u00f6\u2003\u00d0\u00f6\u0271\u00ee\u00f1\u00ee one two]"36 ],37 "ERAS": [38 "[\u0181\u00c7 one]",39 "[\u00c5\u00d0 one]"40 ],41 "FIRSTDAYOFWEEK": 0,42 "MONTH": [43 "[\u0134\u00e5\u00f1\u00fb\u00e5\u0155\u00fd one]",44 "[\u0191\u00e9\u0180\u0155\u00fb\u00e5\u0155\u00fd one]",45 "[\u1e40\u00e5\u0155\u00e7\u0125 one]",46 "[\u00c5\u00fe\u0155\u00ee\u013c one]",47 "[\u1e40\u00e5\u00fd one]",48 "[\u0134\u00fb\u00f1\u00e9 one]",49 "[\u0134\u00fb\u013c\u00fd one]",50 "[\u00c5\u00fb\u011d\u00fb\u0161\u0163 one]",51 "[\u0160\u00e9\u00fe\u0163\u00e9\u0271\u0180\u00e9\u0155 one two]",52 "[\u00d6\u00e7\u0163\u00f6\u0180\u00e9\u0155 one]",53 "[\u00d1\u00f6\u1e7d\u00e9\u0271\u0180\u00e9\u0155 one]",54 "[\u00d0\u00e9\u00e7\u00e9\u0271\u0180\u00e9\u0155 one]"55 ],56 "SHORTDAY": [57 "[\u0160\u00fb\u00f1 one]",58 "[\u1e40\u00f6\u00f1 one]",59 "[\u0162\u00fb\u00e9 one]",60 "[\u0174\u00e9\u00f0 one]",61 "[\u0162\u0125\u00fb one]",62 "[\u0191\u0155\u00ee one]",63 "[\u0160\u00e5\u0163 one]"64 ],65 "SHORTMONTH": [66 "[\u0134\u00e5\u00f1 one]",67 "[\u0191\u00e9\u0180 one]",68 "[\u1e40\u00e5\u0155 one]",69 "[\u00c5\u00fe\u0155 one]",70 "[\u1e40\u00e5\u00fd one]",71 "[\u0134\u00fb\u00f1 one]",72 "[\u0134\u00fb\u013c one]",73 "[\u00c5\u00fb\u011d one]",74 "[\u0160\u00e9\u00fe one]",75 "[\u00d6\u00e7\u0163 one]",76 "[\u00d1\u00f6\u1e7d one]",77 "[\u00d0\u00e9\u00e7 one]"78 ],79 "STANDALONEMONTH": [80 "[\u0134\u00e5\u00f1\u00fb\u00e5\u0155\u00fd one]",81 "[\u0191\u00e9\u0180\u0155\u00fb\u00e5\u0155\u00fd one]",82 "[\u1e40\u00e5\u0155\u00e7\u0125 one]",83 "[\u00c5\u00fe\u0155\u00ee\u013c one]",84 "[\u1e40\u00e5\u00fd one]",85 "[\u0134\u00fb\u00f1\u00e9 one]",86 "[\u0134\u00fb\u013c\u00fd one]",87 "[\u00c5\u00fb\u011d\u00fb\u0161\u0163 one]",88 "[\u0160\u00e9\u00fe\u0163\u00e9\u0271\u0180\u00e9\u0155 one two]",89 "[\u00d6\u00e7\u0163\u00f6\u0180\u00e9\u0155 one]",90 "[\u00d1\u00f6\u1e7d\u00e9\u0271\u0180\u00e9\u0155 one]",91 "[\u00d0\u00e9\u00e7\u00e9\u0271\u0180\u00e9\u0155 one]"92 ],93 "WEEKENDRANGE": [94 5,95 696 ],97 "fullDate": "[EEEE, MMMM d, y]",98 "longDate": "[MMMM d, y]",99 "medium": "[MMM d, y] [h:mm:ss a]",100 "mediumDate": "[MMM d, y]",101 "mediumTime": "[h:mm:ss a]",102 "short": "[M/d/yy] [h:mm a]",103 "shortDate": "[M/d/yy]",104 "shortTime": "[h:mm a]"105 },106 "NUMBER_FORMATS": {107 "CURRENCY_SYM": "$",108 "DECIMAL_SEP": ".",109 "GROUP_SEP": ",",110 "PATTERNS": [111 
{112 "gSize": 3,113 "lgSize": 3,114 "maxFrac": 3,115 "minFrac": 0,116 "minInt": 1,117 "negPre": "-",118 "negSuf": "",119 "posPre": "",120 "posSuf": ""121 },122 {123 "gSize": 3,124 "lgSize": 3,125 "maxFrac": 2,126 "minFrac": 2,127 "minInt": 1,128 "negPre": "-\u00a4",129 "negSuf": "",130 "posPre": "\u00a4",131 "posSuf": ""132 }133 ]134 },135 "id": "en-xa",136 "localeID": "en_XA",137 "pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}138});...
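The `pluralCat` function at the end of this locale file picks a plural category from the integer part of the number and the visible fraction digits computed by `getVF`. Since `getDecimals` and `getVF` are private to the module, this sketch simply restates them as defined above to show the values they produce:
// Helpers restated from the locale file above, to illustrate how pluralCat decides ONE vs OTHER.
function getDecimals(n) {
  n = n + '';
  var i = n.indexOf('.');
  return (i == -1) ? 0 : n.length - i - 1;
}

function getVF(n, opt_precision) {
  var v = opt_precision;
  if (undefined === v) {
    v = Math.min(getDecimals(n), 3);
  }
  var base = Math.pow(10, v);
  var f = ((n * base) | 0) % base;
  return { v: v, f: f };
}

console.log(getVF(1));    // { v: 0, f: 0 }  -> integer part 1 with no visible fraction: category "one"
console.log(getVF(1.5));  // { v: 1, f: 5 }  -> v != 0, so pluralCat falls through to "other"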
smi-negative-zero.js
Source:smi-negative-zero.js
// Copyright 2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Ensure that operations on small integers handle -0.
var zero = 0;
var one = 1;
var minus_one = -1;
var two = 2;
var four = 4;
var minus_two = -2;
var minus_four = -4;
// variable op variable
assertEquals(-Infinity, one / (-zero), "one / -0 I");
assertEquals(-Infinity, one / (zero * minus_one), "one / -1");
assertEquals(-Infinity, one / (minus_one * zero), "one / -0 II");
assertEquals(Infinity, one / (zero * zero), "one / 0 I");
assertEquals(1, one / (minus_one * minus_one), "one / 1");
assertEquals(-Infinity, one / (zero / minus_one), "one / -0 III");
assertEquals(Infinity, one / (zero / one), "one / 0 II");
assertEquals(-Infinity, one / (minus_four % two), "foo1");
assertEquals(-Infinity, one / (minus_four % minus_two), "foo2");
assertEquals(Infinity, one / (four % two), "foo3");
assertEquals(Infinity, one / (four % minus_two), "foo4");
// literal op variable
assertEquals(-Infinity, one / (0 * minus_one), "bar1");
assertEquals(-Infinity, one / (-1 * zero), "bar2");
assertEquals(Infinity, one / (0 * zero), "bar3");
assertEquals(1, one / (-1 * minus_one), "bar4");
assertEquals(-Infinity, one / (0 / minus_one), "baz1");
assertEquals(Infinity, one / (0 / one), "baz2");
assertEquals(-Infinity, one / (-4 % two), "baz3");
assertEquals(-Infinity, one / (-4 % minus_two), "baz4");
assertEquals(Infinity, one / (4 % two), "baz5");
assertEquals(Infinity, one / (4 % minus_two), "baz6");
// variable op literal
assertEquals(-Infinity, one / (zero * -1), "fizz1");
assertEquals(-Infinity, one / (minus_one * 0), "fizz2");
assertEquals(Infinity, one / (zero * 0), "fizz3");
assertEquals(1, one / (minus_one * -1), "fizz4");
assertEquals(-Infinity, one / (zero / -1), "buzz1");
assertEquals(Infinity, one / (zero / 1), "buzz2");
assertEquals(-Infinity, one / (minus_four % 2), "buzz3");
assertEquals(-Infinity, one / (minus_four % -2), "buzz4");
assertEquals(Infinity, one / (four % 2), "buzz5");
assertEquals(Infinity, one / (four % -2), "buzz6");
// literal op literal
assertEquals(-Infinity, one / (-0), "fisk1");
assertEquals(-Infinity, one / (0 * -1), "fisk2");
assertEquals(-Infinity, one / (-1 * 0), "fisk3");
assertEquals(Infinity, one / (0 * 0), "fisk4");
assertEquals(1, one / (-1 * -1), "fisk5");
assertEquals(-Infinity, one / (0 / -1), "hest1");
assertEquals(Infinity, one / (0 / 1), "hest2");
assertEquals(-Infinity, one / (-4 % 2), "fiskhest1");
assertEquals(-Infinity, one / (-4 % -2), "fiskhest2");
assertEquals(Infinity, one / (4 % 2), "fiskhest3");
assertEquals(Infinity, one / (4 % -2), "fiskhest4");
// This tests against a singleton -0.0 object being overwritten.gc
x = 0;
z = 3044;
function foo(x) {
  var y = -x + z;
  return -x;
}
assertEquals(-0, foo(x));
...
Using AI Code Generation
import { one } from 'storybook-root'
import { two } from 'storybook-root'
import { three } from 'storybook-root'
import { four } from 'storybook-root'
import { five } from 'storybook-root'
import { six } from 'storybook-root'
import { seven } from 'storybook-root'
import { eight } from 'storybook-root'
import { nine } from 'storybook-root'
import { ten } from 'storybook-root'
import { eleven } from 'storybook-root'
import { twelve } from 'storybook-root'
import { thirteen } from 'storybook-root'
import { fourteen } from 'storybook-root'
import { fifteen } from 'storybook-root'
import { sixteen } from 'storybook-root'
import { seventeen } from 'storybook-root'
import { eighteen } from 'storybook-root'
import { nineteen } from 'storybook-root'
import { twenty } from 'storybook-root'
import { twentyOne } from 'storybook-root'
import { twentyTwo } from 'storybook-root'
import { twentyThree } from 'storybook-root'
import { twentyFour } from 'storybook-root'
import {
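The snippet above assumes that `storybook-root` resolves to a module exposing named exports `one` through `twentyFour`. If `storybook-root` is a local alias rather than a published package, which is an assumption on my part and not something the snippet shows, those names would typically come from a barrel file that the alias points at, along these lines:
// Hypothetical barrel module that a 'storybook-root' alias could resolve to.
// Only the first few named exports are sketched; the rest would follow the same pattern.
export const one = 'one';
export const two = 'two';
export const three = 'three';
// ... and so on up to twentyFour, matching the named imports used above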
Using AI Code Generation
import { storiesOf } from '@storybook/react';
import { withInfo } from '@storybook/addon-info';
import { withKnobs, text } from '@storybook/addon-knobs';
import { action } from '@storybook/addon-actions';
import { withState } from '@dump247/storybook-state';
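On their own these imports do nothing; a minimal sketch of them in use, assuming the legacy storiesOf API and a local Button component:

// Button.stories.js -- sketch only; './Button' is an assumed local path.
import React from 'react';
import { storiesOf } from '@storybook/react';
import { withKnobs, text } from '@storybook/addon-knobs';
import { action } from '@storybook/addon-actions';
import { Button } from './Button';

storiesOf('Button', module)
  .addDecorator(withKnobs)
  .add('with knob-driven label', () => (
    <Button onClick={action('clicked')}>{text('Label', 'Hello Button')}</Button>
  ));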
Using AI Code Generation
// Button.stories.js
import React from 'react';
import { storiesOf } from '@storybook/react';
import { withKnobs } from '@storybook/addon-knobs';
import { action } from '@storybook/addon-actions';
import { Button } from './Button';

storiesOf('Button', module)
  .addDecorator(withKnobs)
  .add('with text', () => (
    <Button onClick={action('clicked')}>Hello Button</Button>
  ))
  .add('with some emoji', () => (
    // children were elided in the original snippet
    <Button onClick={action('clicked')}>😀 😎 👍 💯</Button>
  ));

// .storybook/config.js
import { configure } from '@storybook/react';

configure(require.context('../src', true, /\.stories\.js$/), module);

// .storybook/addons.js
import '@storybook/addon-actions/register';
import '@storybook/addon-knobs/register';

// .storybook/webpack.config.js
// The loader names below are reconstructed; the original snippet only showed
// the sass includePaths option and the include path.
const path = require('path');

module.exports = {
  module: {
    rules: [
      {
        test: /\.scss$/,
        use: [
          'style-loader',
          'css-loader',
          {
            loader: 'sass-loader',
            options: {
              includePaths: [path.resolve(__dirname, '../src/styles')],
            },
          },
        ],
        include: path.resolve(__dirname, '../'),
      },
    ],
  },
};
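For reference, recent Storybook releases replace the config.js / addons.js pair with a single .storybook/main.js. A minimal sketch, assuming the stories live under ../src:

// .storybook/main.js -- sketch only; the stories glob is an assumption.
module.exports = {
  stories: ['../src/**/*.stories.js'],
  addons: ['@storybook/addon-actions', '@storybook/addon-knobs'],
};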
Using AI Code Generation
// Button.stories.js
import * as React from 'react';
import { withKnobs, text } from '@storybook/addon-knobs';
import { action } from '@storybook/addon-actions';
import { storiesOf } from '@storybook/react';
import { Button } from './Button';

storiesOf('Button', module)
  .addDecorator(withKnobs)
  .add('with text', () => (
    <Button onClick={action('clicked')}>{text('Label', 'Hello Button')}</Button>
  ))
  .add('with some emoji', () => (
    // children were elided in the original snippet
    <Button onClick={action('clicked')}>😀 😎 👍 💯</Button>
  ));

// Button.js
import React from 'react';
import PropTypes from 'prop-types';
import './Button.css';

export const Button = ({ onClick, children }) => (
  <button type="button" onClick={onClick} className="storybook-button">
    {children}
  </button>
);

Button.propTypes = {
  size: PropTypes.oneOf(['small', 'medium', 'large']),
};

Button.defaultProps = {};

/* Button.css */
.storybook-button {
  font-family: 'Roboto', sans-serif;
  font-weight: 700;
  border: 0;
  border-radius: 3em;
  cursor: pointer;
  display: inline-block;
  line-height: 1;
}
.storybook-button--primary {
  color: white;
  background-color: #1ea7fd;
}
.storybook-button--secondary {
  color: #333;
  background-color: transparent;
  box-shadow: rgba(0, 0, 0, 0.15) 0px 0px 0px 1px inset;
}
.storybook-button--small {
  font-size: 12px;
}
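The Button above declares a size prop but never applies it. A hypothetical sketch of how the modifier classes from Button.css could be wired up (the primary prop and the 'medium'/'large' class names are assumptions beyond what the original CSS shows):

// Hypothetical sketch: mapping props onto the modifier classes from Button.css.
import React from 'react';
import './Button.css';

export const Button = ({ primary = false, size = 'medium', onClick, children }) => {
  const classes = [
    'storybook-button',
    'storybook-button--' + size,
    primary ? 'storybook-button--primary' : 'storybook-button--secondary',
  ].join(' ');
  return (
    <button type="button" className={classes} onClick={onClick}>
      {children}
    </button>
  );
};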
Using AI Code Generation
// RootButton and OtherButton are local aliases so the three Button exports can
// coexist in one module without clashing.
import { Button } from '@storybook/react/demo';
import { Button as RootButton } from 'storybook-root';
import { Button as OtherButton } from 'storybook-root/dist/other';
Using AI Code Generation
// App.js -- the JSX to render was elided in the original snippet.
import * as storybookRoot from 'storybook-root';

const App = () => {
  return null;
};

export default App;
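A namespace import on its own renders nothing; the component tree has to reference something the module actually exposes. A hypothetical sketch, assuming the alias exports a Button component:

// Hypothetical sketch: Button is an assumed export of 'storybook-root'.
import React from 'react';
import * as storybookRoot from 'storybook-root';

const App = () => <storybookRoot.Button>Hello from the root alias</storybookRoot.Button>;

export default App;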
Using AI Code Generation
import React from 'react';
import { storiesOf } from 'storybook-root';
// MyComponent's source path is not shown in the original snippet; adjust to the real location.
import { MyComponent } from './MyComponent';

storiesOf('My Component', module)
  .add('My Component', () => <MyComponent />);
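Importing storiesOf from 'storybook-root' rather than '@storybook/react' only works if the root module re-exports it. A hypothetical sketch of such a re-export, consistent with the configure import used further below:

// storybook-root/index.js -- hypothetical re-export module backing these imports.
export { storiesOf, configure } from '@storybook/react';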
Using AI Code Generation
import { storiesOf } from 'storybook-root';

storiesOf('my-story', module)
  .add('my-story', () => 'my-story');
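Each .add call should register a distinct story name under a kind. A small sketch of how several variants are usually grouped (the variant names here are hypothetical):

import { storiesOf } from 'storybook-root';

// Hypothetical variants: each .add gets its own story name under the 'my-story' kind.
storiesOf('my-story', module)
  .add('default', () => 'my-story')
  .add('alternate text', () => 'another my-story');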
Using AI Code Generation
// .storybook/config.js (variant 1): configure re-exported through the root alias.
import { configure } from 'storybook-root';

configure(() => {
  require('./stories');
}, module);

// .storybook/config.js (variant 2): the aliased configure used alongside the stock import.
import { configure } from '@storybook/react';
import { configure as configure2 } from 'storybook-root';

configure2(() => {
  require('./stories');
}, module);

// .storybook/webpack.config.js: point the 'storybook-root' alias at a real directory.
const path = require('path');

module.exports = (baseConfig, env, config) => {
  config.resolve.alias = {
    ...config.resolve.alias,
    'storybook-root': path.resolve(__dirname, '../storybook-root'),
  };
  return config;
};
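On newer Storybook versions the same alias is usually declared in .storybook/main.js via webpackFinal. A minimal sketch, assuming the aliased code lives under ../src:

// .storybook/main.js -- sketch only; the paths and story glob are assumptions.
const path = require('path');

module.exports = {
  stories: ['../src/**/*.stories.js'],
  webpackFinal: async (config) => {
    config.resolve.alias['storybook-root'] = path.resolve(__dirname, '../src');
    return config;
  },
};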