DisposeBeanText.py
Source:DisposeBeanText.py
import json
from paohuan.DaHuaInterfaceUtil import huishou_path
from paohuan.DisposeBean import TalkBean
"""
Add task information here.
"""
# list = []
# list.append(DisposeBean(1, 'çç§æåå¦å¤æï¼ä»£æé®ä¸ä»èç è¡¥ï¼', 'wangxiucai.jpg', 'çç§æ', 530, 172, 'é¿å®å(199,72)').__dict__)
# list.append(DisposeBean(1, 'å¬è¯´ç¯çæªï¼æ£æç¯çç
ï¼ä½ ç»ä»éä¸ä¸ªç¾ç¾è§è¿å»æ¢æä¸ä¸ã', 'fengniuguai.jpg', 'ç¯çæª', 533, 332, 'ç®é©¼å²(34,58)').__dict__)10# list.append(DisposeBean(1, 'ä½å°å§ï¼è¯´èªå·±å¾å欢è§ä¸¾äººï¼ä¸ç´å¨çå¾
ä»ç表ç½ã', 'hexiaojie.jpg', 'ä½å°å§', 235, 225, 'æ´é³å(63,130)').__dict__)11# list.append(DisposeBean(1, 'è¡å·§å¿çææåºç¥å
¥ååï¼ä½ è¦ä¸è¦å»ççã', 'huqiaoer.jpg', 'è¡å·§å¿', 296, 175, 'æ´é³å(154,95)').__dict__)12# list.append(DisposeBean(1, 'å¬è¯´é»ç«çåå 天被人æäºï¼ä½ å»ççä»ä¼¤å¿å¦ä½ã', 'huanghuoniu.jpg', 'é»ç«ç', 285, 262, 'æ´é³å(256,13)').__dict__)13# list.append(DisposeBean(1, '天ç¯è人æ¯å½æ³å»å¥³å¿å°±ä¼åå¼ä¸æ¢ï¼ä½ å»é®åä¸ä¸å§ã', 'tiandenglaoren.jpg', '天ç¯è人', 279, 290, 'é¿å®å(80,138)').__dict__)14# list.append(DisposeBean(1, 'é¿å®çè¡å½¹å 为æè¿ç«å²å¤ªéé·ï¼å¼å¯¼ä¸ä¸ä»ã', 'yayi.jpg', 'è¡å½¹', 305, 265, 'é¿å®å(126,13)').__dict__)15# list.append(DisposeBean(1, 'æ¸ææé¿è¯´ç»è®¡æ°æ®ï¼ä½ å»æ¾ä»æ¥ä¸ªåå§ã', 'yucuncunzhang.jpg', 'æ¸ææé¿', 312, 283, 'ä¸æµ·æ¸æ(30,17)').__dict__)16# list.append(DisposeBean(1, 'æèä¹è¯´å¸æå¨æçä¹å¹´è§å°å¤§ä¾ ãä½ ä¸å¦¨å»è§è§ä»å§ã', 'lilaojiu.jpg', 'æèä¹', 300, 266, 'é¿å®å(16,135)').__dict__)17# list.append(DisposeBean(1, 'æ¸æçä¹ä¸å¿éæ³å¿µäº²äººï¼ä½ å»é®åä¸ç»ä»ç¹äººæ
温æã', 'qigai.jpg', 'ä¹ä¸', 577, 180, 'ä¸æµ·æ¸æ(43,52)').__dict__)18# list.append(DisposeBean(1, 'ä¼ é»è¢å¤©ç½¡ç¥éåçæ¥ä¸ï¼åæ¯å¯ä»¥å»æ¾å°ä»ã', 'yuantiangang.jpg', 'è¢å¤©ç½¡', 284, 270, 'é¿å®å(138,44)').__dict__)19# list.append(DisposeBean(1, 'å°é»è¯´ï¼æ³æ¾ä¸ªäººèè天ï¼ä½ å»éªéªå¥¹å§ã', 'xiaohei.jpg', 'å°é»', 309, 419, 'é¿å¯¿æ(14,145)').__dict__)20# list.append(DisposeBean(1, 'å¬è¯´ççä»è¯´ï¼ä½ å»æ¥ææ
åµã', 'yuhuxian.jpg', 'ççä»', 378, 366, 'æ®éå±±(15,55)').__dict__)21# list.append(DisposeBean(1, 'çå¥åäºå¤©å±å说æäºæ¾ä½ ï¼ä½ 赶快è¿å»ççã', 'xuanzang.jpg', 'çå¥', 442, 158, 'ç空å§æ¿(14,14)').__dict__)22# list.append(DisposeBean(1, 'é®é®æ´é³çè¡å¤§åæå¬ä¸ç¾æççä¸è½ã', 'hudali.jpg', 'è¡å¤§å', 302, 316, 'æ´é³å(232,21)').__dict__)23# list.append(DisposeBean(1, 'æ¡ååå°ï¼è¯´æè¿éé·æ³æ¾ä½ èèã', 'pantaoyuantudi.jpg', 'æ¡ååå°', 278, 224, 'è æ¡å(15,49)').__dict__)24# list.append(DisposeBean(1, 'éèæï¼è¯´ä»æ³ä¼ æä½ ï¼çæç»ä½ 赶快å»å§ã', 'chenlaocai.jpg', 'éèæ', 341, 281, 'é¿å¯¿æ(46,70)').__dict__)25# list.append(DisposeBean(1, 'ç头ï¼è¯´ä»å¤è¦å¾èè´¾çééï¼ä½ éå»é»æ¢ä¸ä¸å§ã', 'niutou.jpg', 'ç头', 311, 275, 'å°ç±è¿·å®«2(25,15)').__dict__)26# list.append(DisposeBean(1, 'èç´ç²¾å¹´äºå·²é«èº«ä½æ±æï¼ä½ 带ä¸æå¤è±å»ççä»å§ã', 'laohoujing.jpg', 'èç´ç²¾', 325, 279, 'äºæå±±(21,43)').__dict__)27# list.append(DisposeBean(1, '马é¢ï¼è¯´æ³è®©èªå·±åå¸
æ°ç¹ï¼ä½ å»æ¾ä»è®¨è®ºä¸ã', 'mamian.jpg', '马é¢', 88, 262, 'å°ç±è¿·å®«4(6,19)').__dict__)28# list.append(DisposeBean(1, 'é夫人说èè´¾å欢ä»ï¼å¯æä¸ç´çä¸å°èè´¾ç表ç½ï¼ç¶åçæ°ä¸è§èè´¾ã', 'chenfuren.jpg', 'é夫人', 325, 279, 'é¿å®å(44,124)').__dict__)29# list.append(DisposeBean(1, 'è§ä¸¾äººï¼è¯´èªå·±å¾å欢ä½å°å§ï¼ä½æ¯è¦äºè¡¨ç½ï¼ä½ å»ççå§ã', 'xiaojuren.jpg', 'è§ä¸¾äºº', 560, 240, 'æ´é³å(63,130)').__dict__)30# list.append(DisposeBean(1, '顶天æ±ï¼è¯´è¦åä½ è¯·æå¦ä½è®ç»éç æï¼ä½ ä¸å¦¨å»çç', 'dingtianzhu.jpg', '顶天æ±', 269, 265, 'æ´é³å(198,69)').__dict__)31# list.append(DisposeBean(1, '顶天æ±åå 天表æ¼å¼ä¼¤èº«ä½äºï¼å¸®æéç¹éåè¯ç»ä»ã', 'dingtianzhu1.jpg', '顶天æ±', 269, 265, 'æ´é³å(198,69)').__dict__)32# list.append(DisposeBean(1, 'ä½èæï¼å¨æ¶éç¾ç¾è§ï¼ä½ ä¹°ä¸ä¸ªéç»ä»å§ã', 'helaocai.jpg', 'ä½èæ', 296, 256, 'æ´é³å(248,118)').__dict__)33# list.append(DisposeBean(1, '游æ¹æ¯å£«ï¼è¯´æ³æ¶é¨å¾è¦äºæ¾ä¸å°å¾å¼ï¼ä½ å»å¸®å©ä»ä¸ä¸å§ã', 'youfangshushi.jpg', '游æ¹æ¯å£«', 282, 247, 'æ¹å¯¸å±±(21,23)').__dict__)34# list.append(DisposeBean(1, 'äºæå±±åï¼å°è¯´æ¾ä½ æç¹äºæ
ï¼ä½ å»ççå§ã', 'wuzhishantudi.jpg', 'äºæå±±åå°', 467, 270, 'äºæå±±(163,43)').__dict__)35# list.append(DisposeBean(1, 'é²å¤§å©¶ï¼æè¿å¾è¦æ¼ä½ å»é®é®ä¸ºä½å¦æ¤è¦æ¼ã', 'ludashen.jpg', 'é²å¤§å©¶', 331, 213, 'æ´é³å(157,138)').__dict__)36# list.append(DisposeBean(1, 'åå 天æ´é³æ¡¥åçæ¤åï¼æ¸¸å®¢åå°äºæåï¼ä½ å»ççé£å§ã', 'youke.jpg', '游客', 332, 235, 'æ´é³å(175,102)').__dict__)37# list.append(DisposeBean(1, '满å æ¥ï¼å¨æ¾äº²ç¶ä½ æä»ä¹æ¶æ¯èµ¶å¿«åè¯ä»å§ã', 'mantangchun.jpg', '满å æ¥', 307, 261, 'æ´é³å(214,113)').__dict__)38#39# list.append(DisposeBean(2, 'æ¢
è±ä»ï¼è¯´èªå·±æ³è¦ä¸ªç°ªåï¼ä½ ä¹°ä¸ä¸ªéç»ä»å§ã', 'meihuaxian.jpg', 'æ¢
è±ä»', 300, 252, 'é¿å®ä¸(64,26)').__dict__)40# list.append(DisposeBean(2, '秦ç¼å¨æ¾ä¸ä»¶æ¿æçæ¦å¨ï¼ä½ å»ä¹°ä¸æé¿æªéç»ä»ã', 'qinqiong.jpg', '秦ç¼', 559, 174, 'é¿å®å(255,52)').__dict__)41# list.append(DisposeBean(2, 'èè´¾æ³å¦ä¹ æ¦èºï¼ä¸ç´å¤´çæ¾ä¸å°éæ³å¥ï¼ä½ å»ä¹°ä¸ªéç»ä»å§ã', 'laojia.jpg', 'èè´¾', 292, 255, 'é¿å®ä¸(34,36)').__dict__)42# list.append(DisposeBean(2, 'åºå¤«äººåå 天çä¸äºä¸ä»¶å¸è£ï¼ä½ 买个ä»å½å礼ç©å§ã', 'pangfuren.jpg', 'åºå¤«äºº', 329, 270, 'é¿å®å(24,91)').__dict__)43# list.append(DisposeBean(2, 'æ
è±ä»åçéå丢äºï¼æ£ä¸ºæ¤äºåæå¢ï¼ä½ å»ä¹°åå¸ééç»ä»ã', 'qinghuaxianzi.jpg', 'æ
è±ä»å', 210, 383, 'ä¸æµ·æ¸æ(43,52)').__dict__)44#45# list.append(DisposeBean(3, 'è æ¡ç¥çµå·åäºè æ¡å»æè®ä»ä¸çªã', 'shenling.jpg', 'ç¥çµ', 558, 110, 'è æ¡å(81,85)').__dict__)46# list.append(DisposeBean(3, 'è æ¡ç¥çµå·åäºè æ¡å»æè®ä»ä¸çªã', 'shenling1.jpg', 'ç¥çµ', 558, 110, 'è æ¡å(81,85)').__dict__)47# list.append(DisposeBean(3, 'è æ¡å¥³å¨²å·åäºè æ¡å»æè®ä»ä¸çªã', 'nvwa.jpg', '女娲', 172, 480, 'è æ¡å(81,85)').__dict__)48# list.append(DisposeBean(3, 'è æ¡å¥³å¨²å·åäºè æ¡å»æè®ä»ä¸çªã', 'nvwa1.jpg', '女娲', 172, 480, 'è æ¡å(81,85)').__dict__)49# list.append(DisposeBean(3, 'è æ¡å¤å°å·åäºè æ¡å»æè®ä»ä¸çªã', 'fenghuang.jpg', 'å¤å°', 228, 266, 'è æ¡å(39,68)').__dict__)50# list.append(DisposeBean(3, 'è æ¡å¤å°å·åäºè æ¡å»æè®ä»ä¸çªã', 'fenghuang1.jpg', 'å¤å°', 228, 266, 'è æ¡å(39,68)').__dict__)51# str = json.dumps(list).encode('utf-8').decode('unicode_escape')52# with open(file_path + 'TaskProfile.txt', 'w', encoding='utf-8') as f:53# f.write(str)54# list = []55# list.append(FlightBean(1, 1, 1, 'é¿å®å(199,72)', 15).__dict__)56# list.append(FlightBean(1, 1, 2, 'é¿å®å(138,44)', 10).__dict__)57# list.append(FlightBean(1, 1, 3, 'é¿å®å(126,13)', 83).__dict__)58# list.append(FlightBean(1, 1, 4, 'é¿å®å(16,135)', 95).__dict__)59# list.append(FlightBean(1, 1, 5, 'é¿å®å(44,124)', 90).__dict__)60# list.append(FlightBean(1, 1, 6, 'é¿å®å(80,138)', 14).__dict__)61#62# list.append(FlightBean(1, 2, 1, 'é¿å®å(24,91)', 18).__dict__)63# list.append(FlightBean(1, 2, 2, 'é¿å®å(255,52)', 87).__dict__)64# list.append(FlightBean(1, 2, 3, 'æ´é³å(256,13)', 10).__dict__)65# list.append(FlightBean(1, 2, 4, 'æ´é³å(232,21)', 18).__dict__)66# list.append(FlightBean(1, 2, 5, 'æ´é³å(154,95)', 95).__dict__)67# list.append(FlightBean(1, 2, 6, 'æ´é³å(175,102)', 91).__dict__)68#69# list.append(FlightBean(1, 3, 1, 'æ´é³å(214,113)', 9).__dict__)70# list.append(FlightBean(1, 3, 2, 'æ´é³å(157,138)', 90).__dict__)71# list.append(FlightBean(1, 3, 3, 'æ´é³å(63,130)', 30).__dict__)72# list.append(FlightBean(1, 3, 4, 'æ´é³å(248,118)', 17).__dict__)73# list.append(FlightBean(1, 3, 5, 'æ´é³å(198,69)', 93).__dict__)74# list.append(FlightBean(1, 3, 6, 'è æ¡å(15,49)', 95).__dict__)75#76# list.append(FlightBean(1, 4, 1, 'è æ¡å(81,85)', 99).__dict__)77# list.append(FlightBean(1, 4, 2, 'è æ¡å(39,68)', 38).__dict__)78# list.append(FlightBean(1, 4, 3, 'ä¸æµ·æ¸æ(30,17)', 98).__dict__)79# list.append(FlightBean(1, 4, 4, 'ä¸æµ·æ¸æ(43,52)', 98).__dict__)80# list.append(FlightBean(1, 4, 5, 'é¿å®ä¸(34,36)', 9).__dict__)81# list.append(FlightBean(1, 4, 6, 'é¿å®ä¸(64,26)', 3).__dict__)82#83# list.append(FlightBean(2, 1, 1, 'é¿å¯¿æ(46,70)', 9).__dict__)84# list.append(FlightBean(2, 1, 2, 'é¿å¯¿æ(14,145)', 3).__dict__)85# list.append(FlightBean(2, 1, 3, 'å°ç±è¿·å®«2(25,15)', 18).__dict__)86# list.append(FlightBean(2, 1, 4, 'å°ç±è¿·å®«4(6,19)', 10).__dict__)87# list.append(FlightBean(2, 1, 5, 'äºæå±±(163,43)', 8).__dict__)88# list.append(FlightBean(2, 1, 6, 'äºæå±±(21,43)', 12).__dict__)89#90# list.append(FlightBean(2, 2, 1, 'æ®éå±±(15,55)', 1).__dict__)91# list.append(FlightBean(2, 2, 2, 'æ¹å¯¸å±±(21,23)', 95).__dict__)92# list.append(FlightBean(2, 2, 3, 'ç空å§æ¿(14,14)', 8).__dict__)93# list.append(FlightBean(2, 2, 4, 'ç®é©¼å²(34,58)', 10).__dict__)94#95# list.append(FlightBean(2, 3, 2, 'å¤ç¨æ£', 99).__dict__)96# list.append(FlightBean(2, 3, 3, 'å¤ç¨æ£', 99).__dict__)97# list.append(FlightBean(2, 3, 4, 'å¤ç¨æ£', 99).__dict__)98# list.append(FlightBean(2, 3, 5, 'å¤ç¨æ£', 99).__dict__)99# list.append(FlightBean(2, 3, 6, 'å¤ç¨æ£', 99).__dict__)100#101# list.append(FlightBean(2, 
4, 1, 'å¤ç¨æ£', 99).__dict__)102# list.append(FlightBean(2, 4, 2, 'å¤ç¨æ£', 99).__dict__)103# list.append(FlightBean(2, 4, 3, 'å¤ç¨æ£', 99).__dict__)104# list.append(FlightBean(2, 4, 4, 'å¤ç¨æ£', 99).__dict__)105# list.append(FlightBean(2, 4, 5, 'å¤ç¨æ£', 99).__dict__)106# list.append(FlightBean(2, 4, 6, 'å¤ç¨æ£', 99).__dict__)107#108# str = json.dumps(list).encode('utf-8').decode('unicode_escape')109# with open(file_path + 'FlightChessProfile.txt', 'w', encoding='utf-8') as f:110# f.write(str)111# list = []112# list.append(TalkBean(2, 'è¿é¿å®åä¸æä¸å¤§éå¡ï¼å¬è¯´éé¢éåäºåä¸å¦éï¼è¦æ¯æ²¡æ两æå·åå¯å«è¿å»éæ»ã', 'talk_2_1.jpg', ).__dict__)113# list.append(TalkBean(2, 'å¨ææç¥éçå
³äºåæ空çæ
äºä¸ï¼ä»å¨ä¸ä¸å¤å¹´éæç»ä¸å¤©å°ç¥ä½ä¸ºæï¼è¿å°åºæ¯ä¸ºä»ä¹å¢ï¼ç©¶ç«ä¸ºäºä»ä¹ï¼ä»è¦ç ´åä¸çç平衡ï¼', 'talk_2_2.jpg', ).__dict__)114# list.append(TalkBean(2, 'æ们ç®é©¼å²æä¸ä½å¤§çï¼å¤§å¤§çè½ååä¸å¤©å
µï¼äºå¤§çä¸èº«é身éèï¼ä¸å¤§çæé£è¿é¾ï¼è°äººè½æï¼', 'talk_2_3.jpg', ).__dict__)115# list.append(TalkBean(0, '', 'talk_0_1.jpg', ).__dict__)116# str = json.dumps(list).encode('utf-8').decode('unicode_escape')117# with open(file_path + 'TalkProfile.txt', 'w', encoding='utf-8') as f:118# f.write(str)119# list = []120# list.append(FlightBean(1, 1, 1, 'é¿å®å(199,72)', 15).__dict__)121# list.append(FlightBean(1, 1, 2, 'å¤ç¨æ£', 99).__dict__)122# list.append(FlightBean(1, 1, 3, 'å¤ç¨æ£', 99).__dict__)123# list.append(FlightBean(1, 1, 4, 'å¤ç¨æ£', 99).__dict__)124# list.append(FlightBean(1, 1, 5, 'å¤ç¨æ£', 99).__dict__)125#126# myClassReBuild = json.loads(read_ispose(file_path + 'FlightChessProfile.txt'))127# flight_chess_datas = []128# for letter in myClassReBuild:129# flight_chess_datas.append(130# FlightBean(letter['goods_position'], letter['goods_position_x'], letter['goods_position_y'],131# letter['flight_chess_info'], letter['times_left']))132# list = []133# for temp in flight_chess_datas:134# list.append(temp.__dict__)135# list.append(FlightBean(2, 3, 5, 'å¤ç¨æ£', 99).__dict__)136# list.append(FlightBean(2, 3, 6, 'å¤ç¨æ£', 99).__dict__)137#138# list.append(FlightBean(2, 4, 1, 'å¤ç¨æ£', 99).__dict__)139# list.append(FlightBean(2, 4, 2, 'å¤ç¨æ£', 99).__dict__)140# list.append(FlightBean(2, 4, 3, 'å¤ç¨æ£', 99).__dict__)141# list.append(FlightBean(2, 4, 4, 'å¤ç¨æ£', 99).__dict__)142# list.append(FlightBean(2, 4, 5, 'å¤ç¨æ£', 99).__dict__)143# list.append(FlightBean(2, 4, 6, 'å¤ç¨æ£', 99).__dict__)144# str = json.dumps(list).encode('utf-8').decode('unicode_escape')145# with open(file_path + 'FlightChessProfile.txt', 'w', encoding='utf-8') as f:146# f.write(str)147# ææ§è¥¿æ¸¸148# list = []149# # list.append(DisposeBean(1, 'æèä¹è¯´å¸æå¨æçä¹å¹´è§å°å¤§ä¾ ãä½ ä¸å¦¨å»è§è§ä»å§ã', 'lilaojiu.jpg', 'æèä¹', 300, 266, 'é¿å®å(16,135)').__dict__)150# # list.append(DisposeBean(1, 'é夫人说èè´¾å欢ä»ï¼å¯æä¸ç´çä¸å°èè´¾ç表ç½ï¼ç¶åçæ°ä¸è§èè´¾ã', 'chenfuren.jpg', 'é夫人', 326, 282, 'é¿å®å(64,112)').__dict__)151# # list.append(DisposeBean(1, 'å¬è¯´é»ç«çåå 天被人æäºï¼ä½ å»ççä»ä¼¤å¿å¦ä½ã', 'huanghuoniu.jpg', 'é»ç«ç', 335, 278, 'æ´é³å(254,13)').__dict__)152# # list.append(DisposeBean(1, '满å æ¥ï¼å¨æ¾äº²ç¶ä½ æä»ä¹æ¶æ¯èµ¶å¿«åè¯ä»å§ã', 'mantangchun.jpg', '满å æ¥', 337, 297, 'æ´é³å(213,112)').__dict__)153# # list.append(DisposeBean(1, 'é¿å®çè¡å½¹å 为æè¿ç«å²å¤ªéé·ï¼å¼å¯¼ä¸ä¸ä»ã', 'yayi.jpg', 'è¡å½¹', 242, 306, 'é¿å®å(126,13)').__dict__)154# # list.append(DisposeBean(1, '天ç¯è人æ¯å½æ³å»å¥³å¿å°±ä¼åå¼ä¸æ¢ï¼ä½ å»é®åä¸ä¸å§ã', 'tiandenglaoren.jpg', '天ç¯è人', 338, 290, 'é¿å®å(79,138)').__dict__)155# # list.append(DisposeBean(1, 'ä¼ é»è¢å¤©ç½¡ç¥éåçæ¥ä¸ï¼åæ¯å¯ä»¥å»æ¾å°ä»ã', 'yuantiangang.jpg', 'è¢å¤©ç½¡', 284, 270, 'é¿å®å(138,44)').__dict__)156# # list.append(DisposeBean(1, 'é®é®æ´é³çè¡å¤§åæå¬ä¸ç¾æççä¸è½ã', 'hudali.jpg', 'è¡å¤§å', 334, 265, 'æ´é³å(231,22)').__dict__)157# # list.append(DisposeBean(1, 'äºæå±±åï¼å°è¯´æ¾ä½ æç¹äºæ
ï¼ä½ å»ççå§ã', 'wuzhishantudi.jpg', 'äºæå±±åå°', 321, 255, 'äºæå±±(167,43)').__dict__)158# # list.append(DisposeBean(1, 'è§ä¸¾äººï¼è¯´èªå·±å¾å欢ä½å°å§ï¼ä½æ¯è¦äºè¡¨ç½ï¼ä½ å»ççå§ã', 'xiaojuren.jpg', 'è§ä¸¾äºº', 572, 240, 'æ´é³å(63,130)').__dict__)159# # list.append(DisposeBean(1, 'å°é»è¯´ï¼æ³æ¾ä¸ªäººèè天ï¼ä½ å»éªéªå¥¹å§ã', 'xiaohei.jpg', 'å°é»', 339, 420, 'é¿å¯¿æ(13,145)').__dict__)160# # list.append(DisposeBean(1, 'æ¡ååå°ï¼è¯´æè¿éé·æ³æ¾ä½ èèã', 'pantaoyuantudi.jpg', 'æ¡ååå°', 304, 270, 'è æ¡å(13,48)').__dict__)161# # list.append(DisposeBean(1, 'ä½èæï¼å¨æ¶éç¾ç¾è§ï¼ä½ ä¹°ä¸ä¸ªéç»ä»å§ã', 'helaocai.jpg', 'ä½èæ', 327, 260, 'æ´é³å(247,118)').__dict__)162# # list.append(DisposeBean(1, 'å¬è¯´ççä»è¯´ï¼ä½ å»æ¥ææ
åµã', 'yuhuxian.jpg', 'ççä»', 384, 366, 'æ®éå±±(15,54)').__dict__)163# # list.append(DisposeBean(1, 'æ¸ææé¿è¯´ç»è®¡æ°æ®ï¼ä½ å»æ¾ä»æ¥ä¸ªåå§ã', 'yucuncunzhang.jpg', 'æ¸ææé¿', 309, 271, 'ä¸æµ·æ¸æ(57,43)').__dict__)164#165# list.append(DisposeBean(1, 'çç§æåå¦å¤æï¼ä»£æé®ä¸ä»èç è¡¥ï¼', 'wangxiucai.jpg', 'çç§æ', 530, 172, 'é¿å®å(199,72)').__dict__)166# list.append(DisposeBean(1, 'å¬è¯´ç¯çæªï¼æ£æç¯çç
ï¼ä½ ç»ä»éä¸ä¸ªç¾ç¾è§è¿å»æ¢æä¸ä¸ã', 'fengniuguai.jpg', 'ç¯çæª', 533, 332, 'ç®é©¼å²(34,58)').__dict__)167# list.append(DisposeBean(1, 'ä½å°å§ï¼è¯´èªå·±å¾å欢è§ä¸¾äººï¼ä¸ç´å¨çå¾
ä»ç表ç½ã', 'hexiaojie.jpg', 'ä½å°å§', 235, 225, 'æ´é³å(63,130)').__dict__)168# list.append(DisposeBean(1, 'è¡å·§å¿çææåºç¥å
¥ååï¼ä½ è¦ä¸è¦å»ççã', 'huqiaoer.jpg', 'è¡å·§å¿', 296, 175, 'æ´é³å(154,95)').__dict__)169# list.append(DisposeBean(1, 'æ¸æçä¹ä¸å¿éæ³å¿µäº²äººï¼ä½ å»é®åä¸ç»ä»ç¹äººæ
温æã', 'qigai.jpg', 'ä¹ä¸', 577, 180, 'ä¸æµ·æ¸æ(43,52)').__dict__)170# list.append(DisposeBean(1, 'çå¥åäºå¤©å±å说æäºæ¾ä½ ï¼ä½ 赶快è¿å»ççã', 'xuanzang.jpg', 'çå¥', 442, 158, 'ç空å§æ¿(14,14)').__dict__)171# list.append(DisposeBean(1, 'éèæï¼è¯´ä»æ³ä¼ æä½ ï¼çæç»ä½ 赶快å»å§ã', 'chenlaocai.jpg', 'éèæ', 341, 281, 'é¿å¯¿æ(46,70)').__dict__)172# list.append(DisposeBean(1, 'ç头ï¼è¯´ä»å¤è¦å¾èè´¾çééï¼ä½ éå»é»æ¢ä¸ä¸å§ã', 'niutou.jpg', 'ç头', 311, 275, 'å°ç±è¿·å®«2(25,15)').__dict__)173# list.append(DisposeBean(1, 'èç´ç²¾å¹´äºå·²é«èº«ä½æ±æï¼ä½ 带ä¸æå¤è±å»ççä»å§ã', 'laohoujing.jpg', 'èç´ç²¾', 325, 279, 'äºæå±±(21,43)').__dict__)174# list.append(DisposeBean(1, '马é¢ï¼è¯´æ³è®©èªå·±åå¸
æ°ç¹ï¼ä½ å»æ¾ä»è®¨è®ºä¸ã', 'mamian.jpg', '马é¢', 88, 262, 'å°ç±è¿·å®«4(6,19)').__dict__)175# list.append(DisposeBean(1, '顶天æ±ï¼è¯´è¦åä½ è¯·æå¦ä½è®ç»éç æï¼ä½ ä¸å¦¨å»çç', 'dingtianzhu.jpg', '顶天æ±', 269, 265, 'æ´é³å(198,69)').__dict__)176# list.append(DisposeBean(1, '顶天æ±åå 天表æ¼å¼ä¼¤èº«ä½äºï¼å¸®æéç¹éåè¯ç»ä»ã', 'dingtianzhu1.jpg', '顶天æ±', 269, 265, 'æ´é³å(198,69)').__dict__)177# list.append(DisposeBean(1, '游æ¹æ¯å£«ï¼è¯´æ³æ¶é¨å¾è¦äºæ¾ä¸å°å¾å¼ï¼ä½ å»å¸®å©ä»ä¸ä¸å§ã', 'youfangshushi.jpg', '游æ¹æ¯å£«', 282, 247, 'æ¹å¯¸å±±(21,23)').__dict__)178# list.append(DisposeBean(1, 'é²å¤§å©¶ï¼æè¿å¾è¦æ¼ä½ å»é®é®ä¸ºä½å¦æ¤è¦æ¼ã', 'ludashen.jpg', 'é²å¤§å©¶', 331, 213, 'æ´é³å(157,138)').__dict__)179# list.append(DisposeBean(1, 'åå 天æ´é³æ¡¥åçæ¤åï¼æ¸¸å®¢åå°äºæåï¼ä½ å»ççé£å§ã', 'youke.jpg', '游客', 332, 235, 'æ´é³å(175,102)').__dict__)180#181# # list.append(DisposeBean(2, '秦ç¼å¨æ¾ä¸ä»¶æ¿æçæ¦å¨ï¼ä½ å»ä¹°ä¸æé¿æªéç»ä»ã', 'qinqiong.jpg', '秦ç¼', 336, 321, 'é¿å®å(262,48)').__dict__)182# # list.append(DisposeBean(2, 'æ
è±ä»åçéå丢äºï¼æ£ä¸ºæ¤äºåæå¢ï¼ä½ å»ä¹°åå¸ééç»ä»ã', 'qinghuaxianzi.jpg', 'æ
è±ä»å', 435, 130, 'ä¸æµ·æ¸æ(27,93)').__dict__)183# # list.append(DisposeBean(2, 'èè´¾æ³å¦ä¹ æ¦èºï¼ä¸ç´å¤´çæ¾ä¸å°éæ³å¥ï¼ä½ å»ä¹°ä¸ªéç»ä»å§ã', 'laojia.jpg', 'èè´¾', 331, 292, 'é¿å®ä¸(45,36)').__dict__)184# # list.append(DisposeBean(2, 'æ¢
è±ä»ï¼è¯´èªå·±æ³è¦ä¸ªç°ªåï¼ä½ ä¹°ä¸ä¸ªéç»ä»å§ã', 'meihuaxian.jpg', 'æ¢
è±ä»', 329, 302, 'é¿å®ä¸(63,26)').__dict__)185#186# list.append(DisposeBean(2, 'åºå¤«äººåå 天çä¸äºä¸ä»¶å¸è£ï¼ä½ 买个ä»å½å礼ç©å§ã', 'pangfuren.jpg', 'åºå¤«äºº', 329, 270, 'é¿å®å(24,91)').__dict__)187#188# # list.append(DisposeBean(3, 'è æ¡ç¥çµå·åäºè æ¡å»æè®ä»ä¸çªã', 'shenling.jpg', 'ç¥çµ', 527, 110, 'è æ¡å(82,85)').__dict__)189# # list.append(DisposeBean(3, 'è æ¡ç¥çµå·åäºè æ¡å»æè®ä»ä¸çªã', 'shenling1.jpg', 'ç¥çµ', 527, 110, 'è æ¡å(82,85)').__dict__)190# list.append(DisposeBean(3, 'è æ¡å¥³å¨²å·åäºè æ¡å»æè®ä»ä¸çªã', 'nvwa.jpg', '女娲', 172, 480, 'è æ¡å(81,85)').__dict__)191# list.append(DisposeBean(3, 'è æ¡å¥³å¨²å·åäºè æ¡å»æè®ä»ä¸çªã', 'nvwa1.jpg', '女娲', 172, 480, 'è æ¡å(81,85)').__dict__)192# list.append(DisposeBean(3, 'è æ¡å¤å°å·åäºè æ¡å»æè®ä»ä¸çªã', 'fenghuang.jpg', 'å¤å°', 248, 308, 'è æ¡å(38,67)').__dict__)193# list.append(DisposeBean(3, 'è æ¡å¤å°å·åäºè æ¡å»æè®ä»ä¸çªã', 'fenghuang1.jpg', 'å¤å°', 248, 308, 'è æ¡å(38,67)').__dict__)194# str = json.dumps(list).encode('utf-8').decode('unicode_escape')195# with open(file_path + 'TaskProfile.txt', 'w', encoding='utf-8') as f:196# f.write(str)197# list = []198# list.append(FlightBean(1, 1, 1, 'é¿å®å(199,72)', 991).__dict__)199# list.append(FlightBean(1, 1, 2, 'é¿å®å(138,44)', 60).__dict__)200# list.append(FlightBean(1, 1, 3, 'é¿å®å(126,13)', 68).__dict__)201# list.append(FlightBean(1, 1, 4, 'é¿å®å(16,135)', 50).__dict__)202# list.append(FlightBean(1, 1, 5, 'é¿å®å(64,112)', 960).__dict__)203# list.append(FlightBean(1, 1, 6, 'é¿å®å(79,138)', 959).__dict__)204#205# list.append(FlightBean(1, 2, 1, 'é¿å®å(24,91)', 61).__dict__)206# list.append(FlightBean(1, 2, 2, 'é¿å®å(262,48)', 68).__dict__)207# list.append(FlightBean(1, 2, 3, 'æ´é³å(254,13)', 957).__dict__)208# list.append(FlightBean(1, 2, 4, 'æ´é³å(231,22)', 920).__dict__)209# list.append(FlightBean(1, 2, 5, 'æ´é³å(153,92)', 970).__dict__)210# list.append(FlightBean(1, 2, 6, 'æ´é³å(174,101)', 975).__dict__)211#212# list.append(FlightBean(1, 3, 1, 'æ´é³å(213,112)', 962).__dict__)213# list.append(FlightBean(1, 3, 2, 'æ´é³å(157,136)', 960).__dict__)214# list.append(FlightBean(1, 3, 3, 'æ´é³å(63,130)', 931).__dict__)215# list.append(FlightBean(1, 3, 4, 'æ´é³å(247,118)', 61).__dict__)216# list.append(FlightBean(1, 3, 5, 'æ´é³å(196,69)', 40).__dict__)217# list.append(FlightBean(1, 3, 6, 'è æ¡å(13,48)', 969).__dict__)218#219# list.append(FlightBean(1, 4, 1, 'è æ¡å(82,85)', 773).__dict__)220# list.append(FlightBean(1, 4, 2, 'è æ¡å(38,67)', 19).__dict__)221# list.append(FlightBean(1, 4, 3, 'ä¸æµ·æ¸æ(57,43)', 966).__dict__)222# list.append(FlightBean(1, 4, 4, 'ä¸æµ·æ¸æ(112,70)', 958).__dict__)223# list.append(FlightBean(1, 4, 5, 'ä¸æµ·æ¸æ(27,93)', 65).__dict__)224# list.append(FlightBean(1, 4, 6, 'é¿å®ä¸(45,36)', 58).__dict__)225#226# list.append(FlightBean(2, 1, 1, 'é¿å®ä¸(63,26)', 57).__dict__)227# list.append(FlightBean(2, 1, 2, 'é¿å¯¿æ(47,70)', 961).__dict__)228# list.append(FlightBean(2, 1, 3, 'é¿å¯¿æ(13,145)', 954).__dict__)229# list.append(FlightBean(2, 1, 4, 'å°ç±è¿·å®«2(24,15)', 956).__dict__)230# list.append(FlightBean(2, 1, 5, 'å°ç±è¿·å®«4(6,18)', 961).__dict__)231# list.append(FlightBean(2, 1, 6, 'äºæå±±(167,43)', 961).__dict__)232#233# list.append(FlightBean(2, 2, 1, 'äºæå±±(21,42)', 57).__dict__)234# list.append(FlightBean(2, 2, 2, 'æ®éå±±(15,54)', 965).__dict__)235# list.append(FlightBean(2, 2, 3, 'æ¹å¯¸å±±(19,22)', 959).__dict__)236# list.append(FlightBean(2, 2, 4, 'ç空å§æ¿(14,14)', 963).__dict__)237# list.append(FlightBean(2, 2, 5, 'ç®é©¼å²(34,58)', 64).__dict__)238# list.append(FlightBean(2, 2, 
6, 'å¤ç¨æ£', 99).__dict__)239#240# list.append(FlightBean(2, 3, 1, 'å¤ç¨æ£', 83).__dict__)241# list.append(FlightBean(2, 3, 2, 'å¤ç¨æ£', 69).__dict__)242# list.append(FlightBean(2, 3, 3, 'å¤ç¨æ£', 72).__dict__)243# list.append(FlightBean(2, 3, 4, 'å¤ç¨æ£', 81).__dict__)244# list.append(FlightBean(2, 3, 5, 'å¤ç¨æ£', 74).__dict__)245# list.append(FlightBean(2, 3, 6, 'å¤ç¨æ£', 84).__dict__)246#247# list.append(FlightBean(2, 4, 1, 'å¤ç¨æ£', 44).__dict__)248# list.append(FlightBean(2, 4, 2, 'å¤ç¨æ£', 37).__dict__)249# list.append(FlightBean(2, 4, 3, 'å¤ç¨æ£', 17).__dict__)250# list.append(FlightBean(2, 4, 4, 'å¤ç¨æ£', 994).__dict__)251# list.append(FlightBean(2, 4, 5, 'å¤ç¨æ£', 999).__dict__)252# list.append(FlightBean(2, 4, 6, 'å¤ç¨æ£', 993).__dict__)253# str = json.dumps(list).encode('utf-8').decode('unicode_escape')254# with open(file_path + 'FlightChessProfile.txt', 'w', encoding='utf-8') as f:255# f.write(str)256#257# list = []258# list.append(TalkBean(2, 'è¿é¿å®åä¸æä¸å¤§éå¡ï¼å¬è¯´éé¢éåäºåä¸å¦éï¼è¦æ¯æ²¡æ两æå·åå¯å«è¿å»éæ»ã', 'talk_2_1.jpg', ).__dict__)259# list.append(TalkBean(2, 'å¨ææç¥éçå
³äºåæ空çæ
äºä¸ï¼ä»å¨ä¸ä¸å¤å¹´éæç»ä¸å¤©å°ç¥ä½ä¸ºæï¼è¿å°åºæ¯ä¸ºä»ä¹å¢ï¼ç©¶ç«ä¸ºäºä»ä¹ï¼ä»è¦ç ´åä¸çç平衡ï¼', 'talk_2_2.jpg', ).__dict__)260# list.append(TalkBean(2, 'æ们ç®é©¼å²æä¸ä½å¤§çï¼å¤§å¤§çè½ååä¸å¤©å
µï¼äºå¤§çä¸èº«é身éèï¼ä¸å¤§çæé£è¿é¾ï¼è°äººè½æï¼', 'talk_2_3.jpg', ).__dict__)261# list.append(TalkBean(2, 'éé±æ¿çä¸å¤ªæ¹ä¾¿äºï¼æ¨è½å¦ç»ç¹é¶åï¼é¶å没æï¼è½ä¸è½ç»ç¹é票åï¼ç¾å
ä¹å¯ä»¥å¦', 'talk_2_4.jpg', ).__dict__)262# list.append(TalkBean(2, '欢è¿æ¨æ¥å°å¤§è¯è¥¿æ¸¸2ä¸çï¼æ¨å¯ä»¥é请æ¨çæåä¸èµ·åå½', 'talk_2_5.jpg', ).__dict__)263# list.append(TalkBean(0, '', 'talk_0_1.jpg', ).__dict__)264# str = json.dumps(list).encode('utf-8').decode('unicode_escape')265# with open(file_path + 'TalkProfile.txt', 'w', encoding='utf-8') as f:266# f.write(str)267# list = []268# list.append(FlightBean(1, 1, 1, 'é¿å®å(199,72)', 15).__dict__)269# list.append(FlightBean(1, 1, 2, 'å¤ç¨æ£', 99).__dict__)270# list.append(FlightBean(1, 1, 3, 'å¤ç¨æ£', 99).__dict__)271# list.append(FlightBean(1, 1, 4, 'å¤ç¨æ£', 99).__dict__)272# list.append(FlightBean(1, 1, 5, 'å¤ç¨æ£', 99).__dict__)273# myClassReBuild = json.loads(read_ispose(file_path + 'FlightChessProfile.txt'))274# flight_chess_datas = []275# for letter in myClassReBuild:276# flight_chess_datas.append(277# FlightBean(letter['goods_position'], letter['goods_position_x'], letter['goods_position_y'],278# letter['flight_chess_info'], letter['times_left']))279# list = []280# for temp in flight_chess_datas:281# list.append(temp.__dict__)282# # list.append(FlightBean(2, 3, 4, 'å¤ç¨æ£', 99).__dict__)283# list.append(FlightBean(2, 3, 5, 'å¤ç¨æ£', 99).__dict__)284# list.append(FlightBean(2, 3, 6, 'å¤ç¨æ£', 99).__dict__)285#286# list.append(FlightBean(2, 4, 1, 'å¤ç¨æ£', 99).__dict__)287# # list.append(FlightBean(2, 4, 2, 'å¤ç¨æ£', 99).__dict__)288# # list.append(FlightBean(2, 4, 3, 'å¤ç¨æ£', 99).__dict__)289# # list.append(FlightBean(2, 4, 4, 'å¤ç¨æ£', 99).__dict__)290# # list.append(FlightBean(2, 4, 5, 'å¤ç¨æ£', 99).__dict__)291# # list.append(FlightBean(2, 4, 6, 'å¤ç¨æ£', 99).__dict__)292# str = json.dumps(list).encode('utf-8').decode('unicode_escape')293# with open(file_path + 'FlightChessProfile.txt', 'w', encoding='utf-8') as f:294# f.write(str)295# list = []296# list.append(DisposeBean(1, 'æèä¹è¯´å¸æå¨æçä¹å¹´è§å°å¤§ä¾ ãä½ ä¸å¦¨å»è§è§ä»å§ã', 'lilaojiu.jpg', 'æèä¹', 310, 280, 'é¿å®å(16,135)').__dict__)297# list.append(DisposeBean(1, 'é夫人说èè´¾å欢ä»ï¼å¯æä¸ç´çä¸å°èè´¾ç表ç½ï¼ç¶åçæ°ä¸è§èè´¾ã', 'chenfuren.jpg', 'é夫人', 326, 282, 'é¿å®å(64,112)').__dict__)298# list.append(DisposeBean(1, 'å¬è¯´é»ç«çåå 天被人æäºï¼ä½ å»ççä»ä¼¤å¿å¦ä½ã', 'huanghuoniu.jpg', 'é»ç«ç', 335, 278, 'æ´é³å(254,13)').__dict__)299# list.append(DisposeBean(1, '满å æ¥ï¼å¨æ¾äº²ç¶ä½ æä»ä¹æ¶æ¯èµ¶å¿«åè¯ä»å§ã', 'mantangchun.jpg', '满å æ¥', 337, 297, 'æ´é³å(213,112)').__dict__)300# list.append(DisposeBean(1, 'é¿å®çè¡å½¹å 为æè¿ç«å²å¤ªéé·ï¼å¼å¯¼ä¸ä¸ä»ã', 'yayi.jpg', 'è¡å½¹', 242, 306, 'é¿å®å(126,13)').__dict__)301# list.append(DisposeBean(1, '天ç¯è人æ¯å½æ³å»å¥³å¿å°±ä¼åå¼ä¸æ¢ï¼ä½ å»é®åä¸ä¸å§ã', 'tiandenglaoren.jpg', '天ç¯è人', 338, 290, 'é¿å®å(79,138)').__dict__)302# list.append(DisposeBean(1, 'ä¼ é»è¢å¤©ç½¡ç¥éåçæ¥ä¸ï¼åæ¯å¯ä»¥å»æ¾å°ä»ã', 'yuantiangang.jpg', 'è¢å¤©ç½¡', 284, 270, 'é¿å®å(138,44)').__dict__)303# list.append(DisposeBean(1, 'é®é®æ´é³çè¡å¤§åæå¬ä¸ç¾æççä¸è½ã', 'hudali.jpg', 'è¡å¤§å', 334, 265, 'æ´é³å(231,22)').__dict__)304# list.append(DisposeBean(1, 'äºæå±±åï¼å°è¯´æ¾ä½ æç¹äºæ
ï¼ä½ å»ççå§ã', 'wuzhishantudi.jpg', 'äºæå±±åå°', 321, 255, 'äºæå±±(167,43)').__dict__)305# list.append(DisposeBean(1, 'è§ä¸¾äººï¼è¯´èªå·±å¾å欢ä½å°å§ï¼ä½æ¯è¦äºè¡¨ç½ï¼ä½ å»ççå§ã', 'xiaojuren.jpg', 'è§ä¸¾äºº', 572, 240, 'æ´é³å(63,130)').__dict__)306# list.append(DisposeBean(1, 'å°é»è¯´ï¼æ³æ¾ä¸ªäººèè天ï¼ä½ å»éªéªå¥¹å§ã', 'xiaohei.jpg', 'å°é»', 339, 420, 'é¿å¯¿æ(13,145)').__dict__)307# list.append(DisposeBean(1, 'æ¡ååå°ï¼è¯´æè¿éé·æ³æ¾ä½ èèã', 'pantaoyuantudi.jpg', 'æ¡ååå°', 304, 270, 'è æ¡å(13,48)').__dict__)308# list.append(DisposeBean(1, 'ä½èæï¼å¨æ¶éç¾ç¾è§ï¼ä½ ä¹°ä¸ä¸ªéç»ä»å§ã', 'helaocai.jpg', 'ä½èæ', 327, 260, 'æ´é³å(247,118)').__dict__)309# list.append(DisposeBean(1, 'å¬è¯´ççä»è¯´ï¼ä½ å»æ¥ææ
åµã', 'yuhuxian.jpg', 'ççä»', 384, 367, 'æ®éå±±(15,54)').__dict__)310# list.append(DisposeBean(1, 'æ¸ææé¿è¯´ç»è®¡æ°æ®ï¼ä½ å»æ¾ä»æ¥ä¸ªåå§ã', 'yucuncunzhang.jpg', 'æ¸ææé¿', 309, 271, 'ä¸æµ·æ¸æ(57,43)').__dict__)311# list.append(DisposeBean(1, 'çç§æåå¦å¤æï¼ä»£æé®ä¸ä»èç è¡¥ï¼', 'wangxiucai.jpg', 'çç§æ', 532, 196, 'é¿å®å(199,72)').__dict__)312# list.append(DisposeBean(1, 'æ¸æçä¹ä¸å¿éæ³å¿µäº²äººï¼ä½ å»é®åä¸ç»ä»ç¹äººæ
温æã', 'qigai.jpg', 'ä¹ä¸', 336, 270, 'ä¸æµ·æ¸æ(112,70)').__dict__)313# list.append(DisposeBean(1, 'ä½å°å§ï¼è¯´èªå·±å¾å欢è§ä¸¾äººï¼ä¸ç´å¨çå¾
ä»ç表ç½ã', 'hexiaojie.jpg', 'ä½å°å§', 247, 225, 'æ´é³å(63,130)').__dict__)314# list.append(DisposeBean(1, 'èç´ç²¾å¹´äºå·²é«èº«ä½æ±æï¼ä½ 带ä¸æå¤è±å»ççä»å§ã', 'laohoujing.jpg', 'èç´ç²¾', 328, 300, 'äºæå±±(21,42)').__dict__)315# list.append(DisposeBean(1, 'éèæï¼è¯´ä»æ³ä¼ æä½ ï¼çæç»ä½ 赶快å»å§ã', 'chenlaocai.jpg', 'éèæ', 338, 296, 'é¿å¯¿æ(47,70)').__dict__)316# list.append(DisposeBean(1, 'é²å¤§å©¶ï¼æè¿å¾è¦æ¼ä½ å»é®é®ä¸ºä½å¦æ¤è¦æ¼ã', 'ludashen.jpg', 'é²å¤§å©¶', 320, 265, 'æ´é³å(157,136)').__dict__)317# list.append(DisposeBean(1, 'å¬è¯´ç¯çæªï¼æ£æç¯çç
ï¼ä½ ç»ä»éä¸ä¸ªç¾ç¾è§è¿å»æ¢æä¸ä¸ã', 'fengniuguai.jpg', 'ç¯çæª', 533, 332, 'ç®é©¼å²(34,58)').__dict__)318# list.append(DisposeBean(1, '顶天æ±ï¼è¯´è¦åä½ è¯·æå¦ä½è®ç»éç æï¼ä½ ä¸å¦¨å»çç', 'dingtianzhu.jpg', '顶天æ±', 328, 278, 'æ´é³å(196,69)').__dict__)319# list.append(DisposeBean(1, '游æ¹æ¯å£«ï¼è¯´æ³æ¶é¨å¾è¦äºæ¾ä¸å°å¾å¼ï¼ä½ å»å¸®å©ä»ä¸ä¸å§ã', 'youfangshushi.jpg', '游æ¹æ¯å£«', 333, 273, 'æ¹å¯¸å±±(19,22)').__dict__)320# list.append(DisposeBean(1, '顶天æ±åå 天表æ¼å¼ä¼¤èº«ä½äºï¼å¸®æéç¹éåè¯ç»ä»ã', 'dingtianzhu1.jpg', '顶天æ±', 328, 278, 'æ´é³å(196,69)').__dict__)321# list.append(DisposeBean(1, 'çå¥åäºå¤©å±å说æäºæ¾ä½ ï¼ä½ 赶快è¿å»ççã', 'xuanzang.jpg', 'çå¥', 442, 158, 'ç空å§æ¿(14,14)').__dict__)322# list.append(DisposeBean(1, 'ç头ï¼è¯´ä»å¤è¦å¾èè´¾çééï¼ä½ éå»é»æ¢ä¸ä¸å§ã', 'niutou.jpg', 'ç头', 330, 278, 'å°ç±è¿·å®«2(24,15)').__dict__)323# list.append(DisposeBean(1, 'è¡å·§å¿çææåºç¥å
¥ååï¼ä½ è¦ä¸è¦å»ççã', 'huqiaoer.jpg', 'è¡å·§å¿', 338, 260, 'æ´é³å(153,92)').__dict__)324# list.append(DisposeBean(1, 'åå 天æ´é³æ¡¥åçæ¤åï¼æ¸¸å®¢åå°äºæåï¼ä½ å»ççé£å§ã', 'youke.jpg', '游客', 333, 257, 'æ´é³å(174,101)').__dict__)325# list.append(DisposeBean(1, '马é¢ï¼è¯´æ³è®©èªå·±åå¸
æ°ç¹ï¼ä½ å»æ¾ä»è®¨è®ºä¸ã', 'mamian.jpg', '马é¢', 88, 269, 'å°ç±è¿·å®«4(6,18)').__dict__)326#327# list.append(DisposeBean(2, '秦ç¼å¨æ¾ä¸ä»¶æ¿æçæ¦å¨ï¼ä½ å»ä¹°ä¸æé¿æªéç»ä»ã', 'qinqiong.jpg', '秦ç¼', 336, 321, 'é¿å®å(262,48)').__dict__)328# list.append(DisposeBean(2, 'æ
è±ä»åçéå丢äºï¼æ£ä¸ºæ¤äºåæå¢ï¼ä½ å»ä¹°åå¸ééç»ä»ã', 'qinghuaxianzi.jpg', 'æ
è±ä»å', 435, 130, 'ä¸æµ·æ¸æ(27,93)').__dict__)329# list.append(DisposeBean(2, 'èè´¾æ³å¦ä¹ æ¦èºï¼ä¸ç´å¤´çæ¾ä¸å°éæ³å¥ï¼ä½ å»ä¹°ä¸ªéç»ä»å§ã', 'laojia.jpg', 'èè´¾', 331, 292, 'é¿å®ä¸(45,36)').__dict__)330# list.append(DisposeBean(2, 'æ¢
è±ä»ï¼è¯´èªå·±æ³è¦ä¸ªç°ªåï¼ä½ ä¹°ä¸ä¸ªéç»ä»å§ã', 'meihuaxian.jpg', 'æ¢
è±ä»', 329, 302, 'é¿å®ä¸(63,26)').__dict__)
# list.append(DisposeBean(2, 'åºå¤«äººåå 天çä¸äºä¸ä»¶å¸è£ï¼ä½ 买个ä»å½å礼ç©å§ã', 'pangfuren.jpg', 'åºå¤«äºº', 329, 278, 'é¿å®å(24,91)').__dict__)
#
# list.append(DisposeBean(3, 'è æ¡ç¥çµå·åäºè æ¡å»æè®ä»ä¸çªã', 'shenling.jpg', 'ç¥çµ', 527, 110, 'è æ¡å(82,85)').__dict__)
# list.append(DisposeBean(3, 'è æ¡ç¥çµå·åäºè æ¡å»æè®ä»ä¸çªã', 'shenling1.jpg', 'ç¥çµ', 527, 110, 'è æ¡å(82,85)').__dict__)
# list.append(DisposeBean(3, 'è æ¡å¤å°å·åäºè æ¡å»æè®ä»ä¸çªã', 'fenghuang.jpg', 'å¤å°', 248, 307, 'è æ¡å(38,67)').__dict__)
# list.append(DisposeBean(3, 'è æ¡å¤å°å·åäºè æ¡å»æè®ä»ä¸çªã', 'fenghuang1.jpg', 'å¤å°', 248, 307, 'è æ¡å(38,67)').__dict__)
# list.append(DisposeBean(3, 'è æ¡å¥³å¨²å·åäºè æ¡å»æè®ä»ä¸çªã', 'nvwa.jpg', '女娲', 142, 480, 'è æ¡å(82,85)').__dict__)
# list.append(DisposeBean(3, 'è æ¡å¥³å¨²å·åäºè æ¡å»æè®ä»ä¸çªã', 'nvwa1.jpg', '女娲', 142, 480, 'è æ¡å(82,85)').__dict__)
#
# str = json.dumps(list).encode('utf-8').decode('unicode_escape')
# with open(file_path + 'TaskProfile.txt', 'w', encoding='utf-8') as f:
# f.write(str)
# å天
# list = []
# list.append(FlightBean(1, 1, 1, 'æé', 99).__dict__)
# list.append(FlightBean(1, 1, 2, 'ä¸å¤´éç', 99).__dict__)
# list.append(FlightBean(1, 1, 3, 'é»å±±å¦ç', 99).__dict__)
# list.append(FlightBean(1, 1, 4, 'èè²å¦ç', 99).__dict__)
# list.append(FlightBean(1, 1, 5, 'ä¸å¹´å¦ç', 99).__dict__)
# list.append(FlightBean(1, 2, 1, 'ç³ç ´ç', 99).__dict__)
# str = json.dumps(list).encode('utf-8').decode('unicode_escape')
# with open(tian_path + 'ZuoTianFlightChess.txt', 'w', encoding='utf-8') as f:
# f.write(str)

# Active section: build the recyclable-item (KeHuiShou) talk profile.
# The original file appends one TalkBean per screenshot, 7 items x 24 images each
# ('<prefix>11.jpg' ... '<prefix>46.jpg', rows 1-4, columns 1-6); the loops below
# produce the identical entries in the identical order.
list = []
items = [
    ('亲密丹', 'qinmidan'),
    ('血玲珑', 'xuelinglong'),
    ('九彩云龙珠', 'jiucaiyunlongzhu'),
    ('内丹精华', 'neidanjinghua'),
    ('千年寒铁', 'qiannianhantie'),
    ('天外飞石', 'tianwaifeishi'),
    ('盘古精铁', 'pangujingtie'),
]
for item_name, image_prefix in items:
    for row in range(1, 5):
        for col in range(1, 7):
            list.append(TalkBean(1, item_name, '{}{}{}.jpg'.format(image_prefix, row, col)).__dict__)
str = json.dumps(list).encode('utf-8').decode('unicode_escape')
with open(huishou_path + 'KeHuiShouProfile.txt', 'w', encoding='utf-8') as f:
    ...
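A note on the write-out idiom used throughout this script: json.dumps(list).encode('utf-8').decode('unicode_escape') only works because the default dumps output is pure ASCII, and the unicode_escape pass also un-escapes JSON's own \" and \\ sequences, so it can corrupt the file if any string ever contains a quote or a backslash. The usual way to keep the Chinese item names readable in the output is ensure_ascii=False. The helper below is only an illustrative sketch; write_profile is not part of the original script.

import json

def write_profile(path, records):
    # Writes the same UTF-8 JSON the script produces, without the fragile
    # unicode_escape round-trip.
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(records, f, ensure_ascii=False)

# e.g. write_profile(huishou_path + 'KeHuiShouProfile.txt', list)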
views.py
Source:views.py
1import json2import numpy3import pandas as pd4import datetime as dt5from django.http import HttpResponse6from django.shortcuts import render7from plotly.offline import plot8import plotly.express as px9from EcoJeju.models import user, worker, wastedata, region, disposedata, workerarea, wasteprediction10from config.settings import DATA_DIRS11map1 = pd.read_csv(DATA_DIRS[0]+'/0816_ì§ìë³_ìì¼ë³.csv', encoding ='cp949')12geo_path2 = DATA_DIRS[0]+'/LSMD_ADM_SECT_UMD_ì 주.zip.geojson'13geo_data2 = json.load(open(geo_path2, encoding='utf-8'))14fig1 = plot(px.choropleth_mapbox(map1, geojson=geo_data2,15 locations='emd_nm',16 color='em_kg',17 color_continuous_scale="matter",18 range_color=(0, 50000),19 mapbox_style="carto-positron",20 featureidkey="properties.EMD_NM",21 zoom=9, center={"lat": 33.39075486566194, "lon": 126.53390204213252},22 opacity=0.5,23 labels={'emd_nm': 'em_kg'},24 animation_frame='week'), output_type='div')25fig2 = plot(px.choropleth_mapbox(map1, geojson=geo_data2,26 locations='emd_nm',27 color='pay_amt',28 color_continuous_scale="matter",29 range_color=(10000, 1200000),30 mapbox_style="carto-positron",31 featureidkey="properties.EMD_NM",32 zoom=9, center={"lat": 33.39075486566194, "lon": 126.53390204213252},33 opacity=0.5,34 labels={'emd_nm': 'pay_amt'},35 animation_frame='week'), output_type='div')36allwaste = wastedata.objects.all();37def home(request):38 context={39 'loginok': False,40 'plot_div1': fig1,41 'plot_div2': fig242 }43 return render(request, 'dashboard.html',context)44def dashboard(request):45 if 'loginok' in request.session:46 context = {47 'loginok': request.session['loginok'],48 'id': request.session['id'],49 'type': request.session['type'],50 'name': request.session['name'],51 'plot_div1': fig1,52 'plot_div2': fig253 }54 else :55 context={ 'loginok': False }56 return render(request, 'dashboard.html',context)57def userboard(request):58 if 'loginok' in request.session:59 context = {60 'loginok': request.session['loginok'],61 'id': request.session['id'],62 'type': request.session['type'],63 'name': request.session['name'],64 }65 else :66 context={67 'loginok': False68 }69 return render(request,'userboard.html',context)70def workerboard(request):71 if 'loginok' in request.session:72 context = {73 'loginok': request.session['loginok'],74 'id': request.session['id'],75 'type': request.session['type'],76 'name': request.session['name'],77 }78 else :79 context={ 'loginok': False,80 }81 return render(request,'workerboard.html',context)82def login(request):83 return render(request, 'login.html')84def loginimpl(request):85 id = request.POST['loginid'];86 pwd = request.POST['loginpwd']87 typecheck = request.POST['logintype'];88 next = 'login.html'89 context={}90 if id == '' or pwd == '':91 context = {92 'result': 'ë¹ì¹¸ì 모ë ì±ì주ì¸ì',93 }94 elif(typecheck =='envir'):95 if worker.objects.filter(worker_id =id).exists():96 getworker = worker.objects.get(worker_id=id);97 if getworker.worker_pwd == pwd:98 context={99 'loginok':True,100 'id': id,101 'type': 00,102 'name': getworker.worker_name103 }104 request.session['loginok']= True;105 request.session['id'] = id;106 request.session['name'] = getworker.worker_name;107 request.session['type'] = 10;108 next = 'dashboard.html'109 else:110 context = {111 "result": "ë¹ë°ë²í¸ê° íë ¸ìµëë¤",112 }113 else:114 context={115 "result": "ì¡´ì¬íì§ ìë ìì´ë ì
ëë¤.",116 }117 elif typecheck =='normal':118 if user.objects.filter(user_id =id).exists():119 getuser = user.objects.get(user_id=id);120 if getuser.user_pwd == pwd:121 context={122 'loginok':True,123 'id': id,124 'type': 10,125 'name': getuser.user_name126 }127 request.session['loginok']= True;128 request.session['id'] = id;129 request.session['name'] = getuser.user_name;130 request.session['type'] = 10;131 next = 'dashboard.html'132 else:133 context = {134 "result": "ë¹ë°ë²í¸ê° íë ¸ìµëë¤"135 }136 else:137 context = {138 "result": "ì¡´ì¬íì§ ìë ìì´ë ì
ëë¤.",139 }140 else :141 context={142 "result": "ë¡ê·¸ì¸ ì¤í¨",143 }144 return render(request,next,context)145def register(request):146 return render(request,'register.html')147def registerimpl(request):148 typecheck = request.POST['inputtype'];149 id = request.POST['inputid'];150 pwd = request.POST['inputpwd'];151 repwd = request.POST['inputrepwd'];152 name = request.POST['inputname'];153 if id=='' or pwd=='' or repwd==''or name =='':154 context = {'result': 'ë¹ì¹¸ì 모ë ì±ì주ì¸ì'}155 elif user.objects.filter(user_id =id).exists() or worker.objects.filter(worker_id =id).exists() :156 context = {'result': 'ì´ë¯¸ ì¡´ì¬íë ìì´ë ì
ëë¤.'}157 elif pwd != repwd:158 context = {'result': 'ë¹ë°ë²í¸ê° ì¼ì¹íì§ ììµëë¤.'}159 else:160 context = {'result': 'íìê°ì
ì ì±ê³µíììµëë¤.'}161 if typecheck == 'envir':162 worker.objects.create(163 worker_id =id,164 worker_name =name,165 worker_pwd =pwd166 ).save()167 else:168 user.objects.create(169 user_id= id,170 user_name= name,171 user_pwd= pwd172 ).save()173 return HttpResponse(json.dumps(context), content_type="application/json")174def recover(request):175 return render(request,'recover.html')176def card1(request):177 todaystr ='2021-06-30'178 today = dt.datetime.strptime(todaystr, '%Y-%m-%d')179 week_ago = today - dt.timedelta(days=6)180 data = pd.DataFrame(allwaste.filter(base_date__gte= week_ago).values_list('base_date','em_g'));181 grouped = data[1].groupby([data[0]]).sum().reset_index()182 result=''183 for d in grouped[1]:184 result = result + ',' + str(int(d/1000))185 result = result[1:]186 print(result);187 context = {188 'data' : result,189 'today': todaystr,190 'today_g': float(grouped[1][grouped.shape[0]-1]/1000)191 }192 return HttpResponse(json.dumps(context), content_type='application/json');193def card2(request):194 todaystr ='2021-06-30'195 today = dt.datetime.strptime(todaystr, '%Y-%m-%d')196 week_ago = today - dt.timedelta(days=6)197 data = pd.DataFrame(allwaste.filter(base_date__gte= week_ago).values_list('base_date','pay_amt'));198 grouped = data[1].groupby([data[0]]).sum().reset_index()199 result=''200 for d in grouped[1]:201 result = result + ',' + str(int(d/10000))202 result = result[1:]203 print();204 context = {205 'data' : result,206 'today': todaystr,207 'today_amt': float(grouped[1][grouped.shape[0]-1]/10000)208 }209 return HttpResponse(json.dumps(context), content_type='application/json');210def plot1(request):211 datas = pd.DataFrame(allwaste.filter(base_date__year=2021).values_list('base_date','pay_amt'));212 datas['Month'] = datas[0].dt.month213 grouped = datas[1].groupby([datas['Month']])214 mon = pd.DataFrame(grouped.sum()).reset_index()215 result = []216 for i in range(len(mon['Month'])):217 result.append([str(mon['Month'][i]) + 'ì', float(mon[1][i]/10000)])218 context = {219 "label": "ì¬ì©ê¸ì¡(ë§ì)",220 "color": "#1f92fe",221 "data" : result222 }223 return HttpResponse(json.dumps(context), content_type='application/json');224def plot3(request):225 datas = pd.DataFrame(allwaste.filter(base_date__year=2021).values_list('base_date','em_g'));226 datas['Month'] = datas[0].dt.month227 grouped = datas[1].groupby([datas['Month']])228 mon = pd.DataFrame(grouped.sum()).reset_index()229 result = []230 for i in range(len(mon['Month'])):231 result.append([str(mon['Month'][i]) + 'ì', float(mon[1][i] / 1000)])232 context = {233 "label": "ë°°ì¶ë(kg)",234 "color": "#1ba3cd",235 "data" : result236 }237 return HttpResponse(json.dumps(context), content_type='application/json');238def usergimpl(request):239 id = 'test01'240 todaystr ='2021-06-30'241 today = dt.datetime.strptime(todaystr, '%Y-%m-%d')242 month_ago = today - dt.timedelta(days=29)243 # disposedata = pd.read_csv(DATA_DIRS[0]+'/regiontabledata.csv', encoding ='cp949')244 datas = pd.DataFrame(disposedata.objects.filter(dispose_user_id=id, dispose_date__gte=month_ago).values('dispose_amount','dispose_weight', 'dispose_date'))245 todayg = int(datas[datas['dispose_date'] == today]['dispose_weight'][0])246 todayamt = int(datas[datas['dispose_date'] == today]['dispose_amount'][0])247 datas['Day'] = datas['dispose_date'].dt.day248 datas.sort_values(by=['Day'], axis=0, inplace=True)249 print(datas);250 first = 7 - int(datas['dispose_date'][0].isoweekday())251 week = [0, 0, 0, 0, 0, 0]252 aweek = [0, 0, 0, 0, 0, 0]253 for i in 
range(len(datas['Day'])):254 if datas['Day'][i] < first + 1:255 week[0] = week[0] + datas['dispose_weight'][i]256 aweek[0] = aweek[0] + datas['dispose_amount'][i]257 elif datas['Day'][i] < first + 8:258 week[1] = week[1] + datas['dispose_weight'][i]259 aweek[1] = aweek[1] + datas['dispose_amount'][i]260 elif datas['Day'][i] < first + 15:261 week[2] = week[2] + datas['dispose_weight'][i]262 aweek[2] = aweek[2] + datas['dispose_amount'][i]263 elif datas['Day'][i] < first + 22:264 week[3] = week[3] + datas['dispose_weight'][i]265 aweek[3] = aweek[3] + datas['dispose_amount'][i]266 elif datas['Day'][i] < first + 29:267 week[4] = week[4] + datas['dispose_weight'][i]268 aweek[4] = aweek[4] + datas['dispose_amount'][i]269 else:270 week[5] = week[5] + datas['dispose_weight'][i]271 aweek[5] = aweek[5] + datas['dispose_amount'][i]272 print(aweek)273 result = ''274 for d in week:275 if d == 0:276 continue;277 result = result + ',' + str(int(d))278 result = result[1:]279 total_g = int(sum(week)/1000)280 aresult=''281 for a in aweek:282 if a==0:283 continue284 aresult= aresult+','+str(a)285 aresult=aresult[1:]286 total_amt = int(sum(aweek))287 print(aresult)288 context = {289 'gvalues': result,290 'amtvalues':aresult,291 'totalg':total_g,292 'totalamt': total_amt,293 'todayg': todayg,294 'todayamt':todayamt,295 'todaystr':todaystr296 }297 return HttpResponse(json.dumps(context), content_type='application/json');298def bars3(request):299 id='test01'300 data2 = pd.DataFrame(disposedata.objects.filter(dispose_user_id = id).values('dispose_date','dispose_weight'))301 data2['dispose_date'] = pd.to_datetime(data2['dispose_date'])302 data2['dispose_year'] = data2['dispose_date'].dt.year303 data2['dispose_month'] = data2['dispose_date'].dt.month304 data2_1 = data2[data2['dispose_month'] == 1]305 data2_2 = data2[data2['dispose_month'] == 2]306 data2_3 = data2[data2['dispose_month'] == 3]307 data2_4 = data2[data2['dispose_month'] == 4]308 data2_5 = data2[data2['dispose_month'] == 5]309 data2_6 = data2[data2['dispose_month'] == 6]310 test2_1 = data2_1['dispose_weight'].sum()311 test2_2 = data2_2['dispose_weight'].sum()312 test2_3 = data2_3['dispose_weight'].sum()313 test2_4 = data2_4['dispose_weight'].sum()314 test2_5 = data2_5['dispose_weight'].sum()315 test2_6 = data2_6['dispose_weight'].sum()316 context = [{317 "label": "ëì íë¬ ë°°ì¶ë",318 "color": "#FF3700",319 "data": [["1ì", int(test2_1)], ["2ì", int(test2_2)], ["3ì", int(test2_3)],320 ["4ì", int(test2_4)], ["5ì", int(test2_5)], ["6ì", int(test2_6)]]321 }, {322 "label": "ìë³ ê°êµ¬ë¹ ë°°ì¶ íê· ë",323 "color": "#57E9E1",324 "data": [['1ì', 12417], ['2ì', 12810], ['3ì', 13518],325 ['4ì', 11979], ['5ì', 13212], ['6ì', 12027]]326 }]327 return HttpResponse(json.dumps(context), content_type='application/json');328 ##------------ì¤ì
ëê·¸ëí---------------------------329def bars4(request):330 id='test01'331 data2 = pd.DataFrame(disposedata.objects.filter(dispose_user_id = id).values('dispose_date','dispose_amount'))332 data2['dispose_date'] = pd.to_datetime(data2['dispose_date'])333 data2['dispose_year'] = data2['dispose_date'].dt.year334 data2['dispose_month'] = data2['dispose_date'].dt.month335 data2_1 = data2[data2['dispose_month'] == 1]336 data2_2 = data2[data2['dispose_month'] == 2]337 data2_3 = data2[data2['dispose_month'] == 3]338 data2_4 = data2[data2['dispose_month'] == 4]339 data2_5 = data2[data2['dispose_month'] == 5]340 data2_6 = data2[data2['dispose_month'] == 6]341 test2_1 = data2_1['dispose_amount'].sum()342 test2_2 = data2_2['dispose_amount'].sum()343 test2_3 = data2_3['dispose_amount'].sum()344 test2_4 = data2_4['dispose_amount'].sum()345 test2_5 = data2_5['dispose_amount'].sum()346 test2_6 = data2_6['dispose_amount'].sum()347 context = [{348 "label": "ìë³ ê°êµ¬ë¹ ì¬ì© ê¸ì¡ íê· ë",349 "color": "#5ab1ef",350 "data": [['1ì', 369], ['2ì', 381], ['3ì', 372],351 ['4ì', 357], ['5ì', 393], ['6ì', 354]]352 }, {353 "label": "ëì íë¬ ê¸ì¡",354 "color": "#FF007B",355 "data": [["1ì", int(test2_1)], ["2ì", int(test2_2)], ["3ì", int(test2_3)],356 ["4ì", int(test2_4)], ["5ì", int(test2_5)], ["6ì", int(test2_6)]]357 }]358 return HttpResponse(json.dumps(context), content_type='application/json');359def piecharts(request):360 id='worker'361 worklist = pd.DataFrame(workerarea.objects.filter(worker_id=id).values())362 areas=[];363 for area in worklist['area_cd']:364 cur = region.objects.get(region_id=area)365 areas.append({'area':area,'region':cur.region_name,'parent':cur.parent_name, 'city':cur.city_name})366 todaystr = '2021-06-30'367 today = dt.datetime.strptime(todaystr, '%Y-%m-%d')368 for area in areas:369 df =pd.DataFrame(wastedata.objects.filter(base_date = today, area_cd=area['area']).values())370 if df.empty :371 area['percent'] = 0372 else :373 val = numpy.around(df['em_g'][0] / 3000)374 area['percent'] = int(val)375 context={376 'areas': areas,377 'today': todaystr,378 }379 return HttpResponse(json.dumps(context), content_type='application/json');380def tables(request):381 id='worker'382 worklist = pd.DataFrame(workerarea.objects.filter(worker_id=id).values('area_cd'))383 checkp=[];384 for area in worklist['area_cd']:385 cur = region.objects.get(region_id=area)386 parent = cur.parent_name387 if parent not in checkp:388 checkp.append(parent);389 list = [];390 for p in checkp:391 jq = wasteprediction.objects.get(region=p)392 list.append({'region': jq.region, 'seven': int(jq.onemonth), 'eight': int(jq.tw0month)})393 context={394 'list': list395 }396 return HttpResponse(json.dumps(context), content_type='application/json');397# dbì ë°ì´í° ë£ê¸° ìí í¨ì398def insertdata(request):399 #ìì물 ì§ìë³ ë°ì´í° ë£ê¸°400 # df = pd.read_csv(DATA_DIRS[0] + '\\foodwastedata.csv', encoding='cp949')401 # data21 = df[df['base_date'].str.split('-').str[0] == '2021']402 # for index in data21.index:403 # base_date = dt.datetime.strptime(df.loc[index]['base_date'], '%Y-%m-%d')404 # city = df.loc[index]['city']405 # emd_nm = df.loc[index]['emd_nm']406 # area_cd = df.loc[index]['em_area_cd']407 # em_cnt = df.loc[index]['em_cnt']408 # em_g = df.loc[index]['em_g']409 # pay_amt = df.loc[index]['pay_amt']410 # wastedata.objects.create(411 # base_date =base_date,412 # city = city,413 # emd_nm =emd_nm,414 # area_cd =area_cd,415 # em_cnt =em_cnt,416 # em_g =em_g,417 # pay_amt =pay_amt418 # ).save()419 # ì§ì ì½ë ë°ì´í° ë£ê¸°420 # datas = 
pd.read_csv(DATA_DIRS[0] + '\\regiontabledata.csv', encoding='cp949')421 # for index in datas.index:422 # region_id =datas.loc[index]['region_id']423 # region_name = datas.loc[index]['region_name']424 # parent_name = datas.loc[index]['parent_name']425 # city_name = datas.loc[index]['city_name']426 # region.objects.create(427 # region_id =region_id,428 # region_name=region_name,429 # parent_name = parent_name,430 # city_name = city_name431 # ).save()432 # # ì¬ì©ì ì´ì© ë°ì´í° ë£ê¸°433 # datas = pd.read_csv(DATA_DIRS[0] + '\\disposedata.csv', encoding='cp949')434 # print(datas);435 # for index in range(datas.shape[0]):436 # dispose_date = dt.datetime.strptime(datas.loc[index]['dispose_date'], '%Y-%m-%d'),437 # dispose_weight = datas.loc[index]['dispose_weight'],438 # dispose_amount=datas.loc[index]['dispose_amount'],439 # dispose_region_code=datas.loc[index]['dispose_city_code'],440 # user_id = datas.loc[index]['dispose_id']441 # print(dispose_date[0],dispose_weight[0],dispose_amount[0],dispose_region_code[0],user_id)442 # disposedata.objects.create(443 # dispose_date=dispose_date[0],444 # dispose_weight=dispose_weight[0],445 # dispose_amount= dispose_amount[0],446 # dispose_region_code=dispose_region_code[0],447 # dispose_user_id= user_id448 # ).save()449 datas = pd.read_csv(DATA_DIRS[0] + '\\78ììê³ì´ì측kg.csv', encoding='cp949')450 for index in range(datas.shape[0]):451 region=datas['emd_nm'][index]452 predic_weight7 =datas['em_kg_pred7'][index]453 predic_weight8=datas['em_kg_pred8'][index]454 print(region,predic_weight7,predic_weight8)455 wasteprediction.objects.create(456 region=region,457 onemonth = predic_weight7,458 tw0month = predic_weight8,459 ).save()460 context={'result': 'success'}461 return render(request,'insertdata.html',context)462def into(request):463 return render(request,'insertdata.html')464if __name__ == '__main__' :...
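The bars3 and bars4 views build their per-month sums by slicing the DataFrame six times by hand (data2_1 through data2_6). A single pandas groupby over the month gives the same totals. The helper below is only a sketch: monthly_totals is not part of views.py, it assumes the same disposedata model and column names used above, and the '월' label suffix is assumed from the chart labels in plot1/plot3.

import pandas as pd

def monthly_totals(user_id, value_column):
    # Same per-user monthly sums that bars3/bars4 compute with six manual
    # month slices, done with one groupby over the month of dispose_date.
    df = pd.DataFrame(
        disposedata.objects.filter(dispose_user_id=user_id)
                           .values('dispose_date', value_column)
    )
    df['dispose_date'] = pd.to_datetime(df['dispose_date'])
    totals = df.groupby(df['dispose_date'].dt.month)[value_column].sum()
    return [['{}월'.format(month), int(total)] for month, total in totals.items()]

# e.g. monthly_totals('test01', 'dispose_weight') for the weight chart in bars3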
xBrepFace_extendAtUntrimmedEdge.py
Source:xBrepFace_extendAtUntrimmedEdge.py
...20import xBrepFace21import xBrepTrim222324def dispose(listDispose): map(lambda x: x.Dispose, listDispose)252627def getBrepIdAndTrimIdx():28 29 disposeUs = []30 31 # Load sticky.32 stickyKeys = ['bSmooth({})'.format(__file__)]33 stickyValues = [True]34 for i, stickyKey in enumerate(stickyKeys):35 if sc.sticky.has_key(stickyKey): stickyValues[i] = sc.sticky[stickyKey]36 bSmooth, = stickyValues37 38 # Get untrimmed brep edge with optional input.39 go = ri.Custom.GetObject(); disposeUs.append(go)40 go.SetCommandPrompt("Select untrimmed edge to extend")41 go.GeometryFilter = rd.ObjectType.EdgeFilter42 # SurfaceBoundaryEdge doesn't include seams.43 go.GeometryAttributeFilter = (44 ri.Custom.GeometryAttributeFilter.SurfaceBoundaryEdge)45 46 def is1FaceBrep (rdBrep, rgTrim, compIdx):47 return rdBrep.BrepGeometry.Faces.Count == 148 go.SetCustomGeometryFilter(is1FaceBrep)49 50 optT_Smooth = ri.Custom.OptionToggle(bSmooth, 'RuledExtension', 'Smooth')51 go.AddOptionToggle('Type', optT_Smooth)52 53 while go.Get() != ri.GetResult.Object:54 sc.escape_test()55 if go.CommandResult() != Rhino.Commands.Result.Success: # Canceled.56 dispose(disposeUs); return57 58 objref = go.Object(0); disposeUs.append(objref)59 idBrep = objref.ObjectId60 compIdx = objref.GeometryComponentIndex.Index61 62 bSmooth = optT_Smooth.CurrentValue63 64 # Save sticky.65 stickyValues = bSmooth,66 for i, stickyKey in enumerate(stickyKeys):67 sc.sticky[stickyKey] = stickyValues[i]68 69 dispose(disposeUs)70 return idBrep, compIdx, bSmooth717273def getExtensionFactor():74 75 # Load sticky.76 stickyKeys = ['fExtLength({})'.format(__file__)]77 stickyValues = [1.0]78 for i, stickyKey in enumerate(stickyKeys):79 if sc.sticky.has_key(stickyKey): stickyValues[i] = sc.sticky[stickyKey]80 fExtLength, = stickyValues81 82 rc, fExtLength = ri.RhinoGet.GetNumber("ExtensionFactor",83 False, fExtLength, 0., 1./Rhino.RhinoMath.ZeroTolerance)84 85 if rc == Rhino.Commands.Result.Success:86 # Save sticky.87 stickyValues = fExtLength,88 for i, stickyKey in enumerate(stickyKeys):89 sc.sticky[stickyKey] = stickyValues[i]90 91 return fExtLength929394def isBrepReadyForExtend(rgBrep, bEcho=False, bDebug=False):95 96 if not rgBrep.IsValid:97 if bEcho: print "Surface is not valid. Exiting..."98 return False99 if not rgBrep.IsManifold:100 if bEcho: print "Surface is non-manifold. Exiting..."101 return False102 if rgBrep.IsSurface and rgBrep.IsSolid:103 if bEcho: print "Surface is a solid. Exiting..."104 return False105 106 disposeUs = []107 108 rgFace = rgBrep.Faces[0]; disposeUs.append(rgFace)109 if not rgFace.IsValid:110 if bEcho:111 print "Warning: Face is invalid, but script will attempt to continue."112 113 rgSrf = rgFace.UnderlyingSurface(); disposeUs.append(rgSrf)114 if not rgSrf.IsValid:115 if bEcho: print "Underlying surface is invalid. Exiting.."116 dispose(disposeUs); return 117 118 rgBrep_Extended_TrySplit = rgSrf.ToBrep(); disposeUs.append(rgBrep_Extended_TrySplit)119 120 fKinkTol = sc.doc.ModelAngleToleranceRadians121 bSplitKinkyFaces = rgBrep_Extended_TrySplit.Faces.SplitKinkyFaces(122 fKinkTol, True) # True doesn't mean that any splits had occurred.123 if not bSplitKinkyFaces:124 if bDebug: sPrint = 'bSplitKinkyFaces'; print sPrint + ':', eval(sPrint)125 if bEcho: print "Split kiny face check failed. Exiting..."126 dispose(disposeUs); return127 if rgBrep_Extended_TrySplit.Faces.Count != 1:128 if bEcho: print "At {}{} tolerance, the surface has kinks. 
" \129 "Repair surface before extending it.".format(130 sc.doc.ModelAngleToleranceDegrees, chr(176))131 dispose(disposeUs); return132 133 dispose(disposeUs)134 135 return True136137138def getAreaOfSurface(rgSrf, bEcho=False):139 """Tries brep if surface fails."""140 areaMassProp = rg.AreaMassProperties.Compute(rgSrf)141 if areaMassProp is None:142 if bEcho:143 print "AreaMassProperties cannot be computed for surface."144 print "Trying to compute AreaMassProperties for brep of surface..."145 rgBrepTemp = rgSrf.ToBrep()146 areaMassProp = rg.AreaMassProperties.Compute(rgBrepTemp)147 rgBrepTemp.Dispose()148 if areaMassProp is None: return149 elif bEcho: print "...Success!"150 area = areaMassProp.Area151 areaMassProp.Dispose()152 return area153154155def isExtendedSrfOk(rgSrf_Ext, rgSrf0, bEcho=False, bDebug=False):156 """157 Includes verification that area has changed.158 """159 160 if not rgSrf_Ext.IsValid:161 return False162 163 # Get AreaMassProperties of original and new surface.164 areaSrf0 = getAreaOfSurface(rgSrf0)165 if areaSrf0 is None:166 if bEcho: print "Area not computed for original surface."167 return False168 areaSrf_Extended = getAreaOfSurface(rgSrf_Ext)169 if areaSrf_Extended is None:170 if bEcho: print "Area of original surface not computed."171 return False172 173 # Compare original and new areas and, if the same, return.174 if Rhino.RhinoMath.EpsilonEquals(areaSrf_Extended, areaSrf0,175 sc.doc.ModelAbsoluteTolerance**2):176 return False177 178 return rgSrf_Ext179180181def trimExtendedSrf(rgSrf_Ext, rgBrep0, idxTrims_toSkip, bEcho=False, bDebug=False):182 """183 Parameters:184 rgSrf_Ext: Extended surface.185 rgBrep0: Original 1-face Brep.186 idxTrims_toSkip: List of SENW Trims not to trim.187 Returns:188 Trimmed 1-face Brep or None.189 """190 191 disposeUs = []192 193 fPtMatchTol = 1.e-6194 195 # Make IsoStatus list per idxTrims_toSkip.196 isoStats_Ext = []197 for i in idxTrims_toSkip:198 rgT_Ext = rgBrep0.Trims[i]; disposeUs.append(rgT_Ext)199 if rgT_Ext.IsoStatus not in isoStats_Ext:200 isoStats_Ext.append(rgT_Ext.IsoStatus)201 202 # Make outer Edge list and inner Edge list of only non-SENW Edges or203 # SENW Trim not extended for Trim.204 rgTrims_B0 = rgBrep0.Trims; disposeUs.extend(rgTrims_B0)205 rgEs_Otr = []; rgEs_Inr = []206 for i, rgTrim in enumerate(rgTrims_B0):207 if rgTrim.Loop.LoopType == rg.BrepLoopType.Outer: # Outer loop.208 # Only use other edges with selected edge SENW.209 if xBrepTrim.isSenw(rgTrim):210 if any(i == idx for idx in idxTrims_toSkip):211 continue # Trims of idxTrims_toSkip will not be used.212 if any(rgTrim.IsoStatus == iso for iso in isoStats_Ext):213 continue # This is a SENW different than that of any of the extended Trims.214 rgEs_Otr.append(rgTrim.Edge)215 else: # Inner loop216 rgEs_Inr.append(rgTrim.Edge)217 218 #map(sc.doc.Objects.AddCurve, rgEs_Otr)219 220 rgBrep_Extended = Rhino.Geometry.Brep.CreateFromSurface(rgSrf_Ext)221 222 # How could this happen?223 if rgEs_Otr.Count + rgEs_Inr.Count == 0:224 if bEcho: print "Original brep was trimmed, but " \225 "no edges found with which to trim. 
Using full surface."226 return rgBrep_Extended227 228 # Trim face (surface) with both outer and inner edges.229 disposeUs.append(rgBrep_Extended)230 disposeUs.extend(rgEs_Otr + rgEs_Inr)231 232 # TO (Possibly) DO: If extending the curves when they don't need to233 # be becomes a problem, first check whether extended edge is a full234 # surface edge and create conditional based on that condition.235 236 # Join curves (edges).237 rgCs_Otr_Joined_All = rg.Curve.JoinCurves(rgEs_Otr)238 disposeUs.extend(rgCs_Otr_Joined_All)239 240 # Extend joined curves on surface, using SimplifyEnd at actual extensions241 # before the curves are exploded.242 rgFace_Ext = rgBrep_Extended.Faces[0]; disposeUs.append(rgFace_Ext)243 rgCs_Otr_Joined_Ext = []244 for rgC_Otr_Joined in rgCs_Otr_Joined_All:245 rgC_Otr_Joined_Ext = rgC_Otr_Joined.ExtendOnSurface(246 rg.CurveEnd.Both, rgFace_Ext)247 if rgC_Otr_Joined_Ext is None: continue248 disposeUs.append(rgC_Otr_Joined_Ext)249 250 ## Find extension "side" ends by comparing CurveEnd points, before vs. after ExtendOnSurface.251 # At extended "side" ends, use SimplifyEnd to simplify only that "side" end and adjacent segments.252 ptS0 = rgC_Otr_Joined.PointAtStart253 ptE0 = rgC_Otr_Joined.PointAtEnd254 ptS1 = rgC_Otr_Joined_Ext.PointAtStart255 ptE1 = rgC_Otr_Joined_Ext.PointAtEnd256 257 # CurveEnd.Start check and simplification.258 if not ptS1.EpsilonEquals(ptS0, fPtMatchTol):259 rgC_Otr_Joined_Ext_StartSimpl = rgC_Otr_Joined_Ext.SimplifyEnd(260 rg.CurveEnd.Start, rg.CurveSimplifyOptions.All,261 fPtMatchTol, sc.doc.ModelAngleToleranceRadians)262 if rgC_Otr_Joined_Ext_StartSimpl is None:263 rgC_Otr_Joined_Ext_StartSimpl = rgC_Otr_Joined_Ext # For CurveEnd.End processing.264 else:265 disposeUs.append(rgC_Otr_Joined_Ext)266 else: rgC_Otr_Joined_Ext_StartSimpl = rgC_Otr_Joined_Ext # For CurveEnd.End processing.267 268 # CurveEnd.End check and simplification.269 if not ptE1.EpsilonEquals(ptE0, fPtMatchTol):270 rgC_Otr_Joined_Ext = rgC_Otr_Joined_Ext_StartSimpl.SimplifyEnd(271 rg.CurveEnd.End, rg.CurveSimplifyOptions.All,272 fPtMatchTol, sc.doc.ModelAngleToleranceRadians)273 if rgC_Otr_Joined_Ext is None:274 rgC_Otr_Joined_Ext = rgC_Otr_Joined_Ext_StartSimpl275 else:276 disposeUs.append(rgC_Otr_Joined_Ext)277 rgCs_Otr_Joined_Ext.append(rgC_Otr_Joined_Ext)278 disposeUs.extend(rgCs_Otr_Joined_Ext)279 280 # Explode curves so that the proper vertices are replaced on the brep.281 rgCs_Otr_Ext_Segs_All = []282 for rgCrv_Otr_Ext in rgCs_Otr_Joined_Ext:283 rgCrvs_Otr_Ext_Segs = rgCrv_Otr_Ext.DuplicateSegments()284 rgCs_Otr_Ext_Segs_All.extend(rgCrvs_Otr_Ext_Segs)285 disposeUs.extend(rgCs_Otr_Ext_Segs_All)286 287 if bDebug: map(sc.doc.Objects.AddCurve, rgCs_Otr_Ext_Segs_All + rgEs_Inr)288 289 # Split full surface brep with curves.290 rgBrep_Split = rgFace_Ext.Split(rgCs_Otr_Ext_Segs_All + rgEs_Inr,291 sc.doc.ModelAbsoluteTolerance)292 if rgBrep_Split is None: dispose(disposeUs); return293 disposeUs.append(rgBrep_Split)294 295 # Check whether brep has more than one face before attempting to get correct face.296 if rgBrep_Split.Faces.Count > 1:297 # Get point on face of original brep for face matching.298 ptOnFace = xBrepFace.createPoint3dOnInterior(299 rgBrep0.Faces[0],300 fMinDistFromBorder=10.0*sc.doc.ModelAbsoluteTolerance)301 if ptOnFace is None:302 dispose(disposeUs); return303 304 # Get correct brep face.305 idx_rgFace_Pos = xBrep_findMatchingFace.usingPointOnFace(306 rgBrep_Split, ptOnFace)307 if idx_rgFace_Pos is None:308 dispose(disposeUs); return309 310 rgBrep_ForReplace = 
rgBrep_Split.Faces[311 idx_rgFace_Pos].DuplicateFace(False)312 disposeUs.append(rgBrep_ForReplace)313 else:314 if bEcho:315 print "Brep was not split. Replacing original with full surface..."316 rgBrep_ForReplace = rgBrep_Split317 318 return rgBrep_ForReplace319320321def main(bEcho=False, bDebug=False):322 323 disposeUs = []324 325 sTitle = "Extend Base Surface"326 327 ret = getBrepIdAndTrimIdx()328 if ret is None: return329 idBrep0, idxTrim, bSmooth = ret330 331 rgBrep0 = sc.doc.Objects.Find(idBrep0).BrepGeometry332 disposeUs.append(rgBrep0)333 if not isBrepReadyForExtend(rgBrep0, bEcho=bEcho, bDebug=bDebug):334 dispose(disposeUs); return Rhino.Commands.Result.Failure335 336 rgSrf0 = rgBrep0.Faces[0].UnderlyingSurface(); disposeUs.append(rgSrf0)337 if rgSrf0 is None:338 dispose(disposeUs); return Rhino.Commands.Result.Failure339 340 fExtLength = getExtensionFactor()341 if fExtLength is None: dispose(disposeUs); return342 343 rgTrim = rgBrep0.Trims[idxTrim]; disposeUs.append(rgTrim)344 345 # Create extended surface.346 rgSrf_Ext = rgSrf0.Extend(rgTrim.IsoStatus, fExtLength, bSmooth)347 if rgSrf_Ext is None: dispose(disposeUs); return348 disposeUs.append(rgSrf_Ext)349 if not isExtendedSrfOk(rgSrf_Ext, rgSrf0, bEcho=bEcho, bDebug=bDebug):350 dispose(disposeUs); return351 #sc.doc.Objects.AddSurface(rgSrf_Ext)352 353 # Was brep a full surface?354 if rgBrep0.IsSurface:355 if bEcho: print "Original brep was not trimmed. Using full surface."356 rgBrep_ForReplace = Rhino.Geometry.Brep.CreateFromSurface(rgSrf_Ext)357 else:358 rgBrep_ForReplace = trimExtendedSrf(rgSrf_Ext, rgBrep0, [idxTrim], bEcho=bEcho, bDebug=bDebug)359 if rgBrep_ForReplace is None: dispose(disposeUs); return360 361 if not sc.doc.Objects.Replace(idBrep0, rgBrep_ForReplace):362 dispose(disposeUs); return Rhino.Commands.Result.Failure363 364 sc.doc.Views.Redraw()365 366 dispose(disposeUs); return Rhino.Commands.Result.Success367368
...
excel2Lua.py
Source:excel2Lua.py
1import os2import sys3import codecs4import xlrd #http://pypi.python.org/pypi/xlrd5import traceback6import excel2sproto7'''示ä¾ä»£ç 8local JSON = require "cjson"9local function disposeNumArr(arr, rate)10 local newArr = {}11 for k, v in ipairs(arr) do 12 table.insert(newArr, v / rate)13 end14 return newArr15end16local function disposeStructArr_ntest(arr)17 local newArr = {}18 for k, v in ipairs(arr) do 19 v.count = v.count / rate20 table.insert(newArr, v)21 end22 return newArr23end24local function disposeValue(conf)25 conf.tnumber = conf.tnumber / 1026 conf.cost = JSON.decode(conf.cost)27 -- æ°ç»28 conf.numbers = disposeNumArr(conf.numbers, 10)29 -- ç»æä½30 conf.nitem.count = conf.nitem.count / 10031 -- ç»æä½æ°ç»32 conf.ntest = disposeStructArr_ntest(conf.ntest)33 return conf34end35Configs[v.id] = disposeValue(v)36Configs[v.id] = v37'''38#ååæ¢è¡39disposeNumArr = '''40local function disposeNumArr(arr, rate)41 local newArr = {}42 for k, v in ipairs(arr) do 43 table.insert(newArr, v / rate)44 end45 return newArr46end47'''48lua = '''TestConfigs = {}49local Configs = {}50|disposeValue|51function TestConfigs.InitModule()52 local data = ConfigMgr.ParseBytes("TestConfigs")53 for k, v in pairs(data.AllTestConfig) do 54 Configs[v.id] = disposeValueVV55 end56end57function TestConfigs.Get(id)58 return Configs[id]59end60function TestConfigs.GetAll()61 local arr = {}62 for k, v in pairs(Configs) do63 table.insert(arr, v)64 end65 return arr66end67return TestConfigs'''68#TestConfig -> TestConfigs.lua69def genLuaFile(table, exportPath):70 nrows = table.nrows71 ncols = table.ncols72 config = table.cell_value(0, 0)73 configs = config + "s"74 75 luaPath = exportPath + "/" + configs + ".lua"76 dir = os.path.dirname(luaPath)77 if dir and not os.path.exists(dir):78 os.makedirs(dir)79 file = codecs.open(luaPath, "w", "utf-8")80 81 luaFile = lua82 luaFile = luaFile.replace('AllTestConfig', 'All' + config)83 luaFile = luaFile.replace('TestConfigs', configs)84 disposeValueStr = ""85 hasJSON = False86 hasDisFunc = False87 hasDisposeNumArr = False88 hasStructArr = False89 #å¤çæµ®ç¹ï¼JSON90 sp_struct_arr = excel2sproto.collectStructArrayInfo(table, ncols)91 numStrcutArrDic = {}92 for c in range(0, ncols):93 #第3è¡æ¯å±æ§ 第4è¡æ¯æ°æ®ç±»å94 propName = table.cell_value(3, c)95 propType = table.cell_value(4, c)96 if propName == "" or propType == "":97 continue #è·³è¿ç©ºå98 if propType == "ignore":99 continue100 #JSON101 if propType == "json" or propType == "JSON":102 # conf.cost = JSON.decode(conf.cost)103 jsonVal = "\tconf.{0} = JSON.decode(conf.{1})\n".format(propName, propName)104 disposeValueStr = disposeValueStr + jsonVal105 hasJSON = True106 hasDisFunc = True107 #number108 # å¤çç»æä½ï¼æ°ç»ï¼éé¢çæµ®ç¹æ°109 if "number" in propType:110 rate = propType.replace('number', '')111 #numberæ°ç»112 if propName.find("-") != -1 and propName.find('.') != -1:113 #ç»æä½æ°ç»114 propNames = propName.split("-")115 prop0 = propNames[0]116 parts = propNames[1].split(".")117 part1 = parts[1]118 # ntest-0.count119 struct_item_index = parts[0]120 if struct_item_index == '0':121 if prop0 in sp_struct_arr:122 if not prop0 in numStrcutArrDic:123 numStrcutArrDic[prop0] = {}124 numStrcutArrDic[prop0][part1] = rate125 hasDisFunc = True126 hasStructArr = True127 elif propName.find("-") != -1:128 #æ°ç»129 propNames = propName.split("-")130 prop0 = propNames[0]131 if propNames[1] == "0":132 # conf.numbers = disposeNumArr(conf.numbers, 10)133 numArrVal = "\tconf.{0} = disposeNumArr(conf.{1}, {2})\n".format(prop0, prop0, rate)134 disposeValueStr = 
disposeValueStr + numArrVal135 hasDisFunc = True136 hasDisposeNumArr = True137 elif propName.find(".") != -1:138 #ç»æä½139 # conf.nitem.count = conf.nitem.count / 100140 numStrcutVal = "\tconf.{0} = conf.{1} / {2}\n".format(propName, propName, rate)141 disposeValueStr = disposeValueStr + numStrcutVal142 hasDisFunc = True143 else:144 # conf.tnumber = conf.tnumber / 10145 numVal = "\tconf.{0} = conf.{1} / {2}\n".format(propName, propName, rate)146 disposeValueStr = disposeValueStr + numVal147 hasDisFunc = True148 if hasStructArr == True:149 numStructArrFunc = ""150 for k, v in numStrcutArrDic.items():151 # conf.ntest = disposeStructArr_ntest(conf.ntest)152 numStructArrVal = "\tconf.{0} = disposeStructArr_{1}(conf.{2})\n".format(k, k, k)153 disposeValueStr = disposeValueStr + numStructArrVal154 funcVal = "\nlocal function disposeStructArr_{}(arr)\n".format(k)155 funcVal = funcVal + "\tlocal newArr = {}\n"156 funcVal = funcVal + "\tfor k, v in ipairs(arr) do\n"157 for kk, vv in v.items():158 funcVal = funcVal + "\t\tv.{} = v.{} / {}\n".format(kk, kk, vv)159 funcVal = funcVal + "\t\ttable.insert(newArr, v)\n"160 funcVal = funcVal + "\tend\n"161 funcVal = funcVal + "\treturn newArr\n"162 funcVal = funcVal + "end\n"163 numStructArrFunc = numStructArrFunc + funcVal164 disposeValueStr = "\nlocal function disposeValue(conf)\n" + disposeValueStr165 disposeValueStr = disposeValueStr + "\treturn conf\nend\n"166 if hasStructArr == True:167 disposeValueStr = numStructArrFunc + disposeValueStr168 if hasDisposeNumArr == True:169 disposeValueStr = disposeNumArr + disposeValueStr170 if hasDisFunc == True:171 luaFile = luaFile.replace('|disposeValue|', disposeValueStr)172 luaFile = luaFile.replace('disposeValueVV', 'disposeValue(v)')173 else:174 luaFile = luaFile.replace('|disposeValue|', '')175 luaFile = luaFile.replace('disposeValueVV', 'v')176 if hasJSON == True:177 luaFile = "local JSON = require \"cjson\"\n\n" + luaFile178 file.write(luaFile)179 file.close()180def exportAll(excelPath, exportPath):181 excel = xlrd.open_workbook(excelPath)182 allSheetNames = excel.sheet_names()183 for name in allSheetNames:184 exports = name.split("_")185 if len(exports) > 1:186 if str(exports[1]) == "noexport":187 continue188 table = excel.sheet_by_name(name)189 genLuaFile(table, exportPath)190 print("generate lua done " + name)191if __name__ == '__main__':192 try:193 if len(sys.argv) < 2:194 print('excel2Lua argv error')195 os.system("pause")196 197 excelPath = sys.argv[1]198 exportPath = sys.argv[2]199 200 exportAll(excelPath, exportPath)201 print("All OK")202 except:203 traceback.print_exc()...
test_chalice_basic.py
Source:test_chalice_basic.py
1# Any copyright is dedicated to the Public Domain.2# http://creativecommons.org/publicdomain/zero/1.0/3#:: IgnoreFile(silicon)(320)4# chaliceSuite/permission-model/basic.chalice5from nagini_contracts.contracts import *6from nagini_contracts.obligations import MustTerminate7from nagini_contracts.thread import Thread8class Cell:9 def __init__(self) -> None:10 Ensures(Acc(self.x))11 self.x = 0 # type: int12 # dispose a read permission to x13 def dispose_rd(self) -> None:14 Requires(Rd(self.x))15 Requires(MustTerminate(2))16 #Ensures(True)17 # return read permission18 def void(self) -> None:19 Requires(Rd(self.x))20 Requires(MustTerminate(2))21 Ensures(Rd(self.x))22 # multiple calls to method that destroys rd(x)23 def a1(self) -> None:24 Requires(Rd(self.x))25 Ensures(True)26 self.dispose_rd()27 self.dispose_rd()28 # call to method that destroys rd(x) really removes permission29 def a2(self) -> None:30 Requires(Rd(self.x))31 #:: ExpectedOutput(postcondition.violated:insufficient.permission)32 Ensures(Rd(self.x))33 self.dispose_rd()34 # forking and method calls of dispose_rd35 def a3(self) -> None:36 Requires(Rd(self.x))37 Ensures(True)38 t1 = Thread(None, self.dispose_rd, args=())39 t1.start(self.dispose_rd)40 self.dispose_rd()41 t2 = Thread(None, self.dispose_rd, args=())42 t2.start(self.dispose_rd)43 #:: UnexpectedOutput(silicon)(call.precondition:insufficient.permission,320)44 self.dispose_rd()45 # forking and method calls of dispose_rd46 def a4(self) -> None:47 Requires(Rd(self.x))48 #:: ExpectedOutput(postcondition.violated:insufficient.permission)49 Ensures(Rd(self.x))50 t1 = Thread(None, self.dispose_rd, args=())51 t1.start(self.dispose_rd)52 # We should retain some permission53 def a6(self) -> None:54 Requires(Rd(self.x))55 Ensures(Acc(self.x, ARP(1)))56 self.dispose_rd()57 # multiple forks of dispose_rd58 def a7(self) -> None:59 Requires(Rd(self.x))60 Ensures(True)61 t1 = Thread(None, self.dispose_rd, args=())62 t1.start(self.dispose_rd)63 t2 = Thread(None, self.dispose_rd, args=())64 # probably due to timeout in silicon, does not always occur65 #:: UnexpectedOutput(silicon)(thread.start.failed:insufficient.permission,320)66 t2.start(self.dispose_rd)67 t3 = Thread(None, self.dispose_rd, args=())68 # probably due to timeout in silicon, does not always occur69 #:: UnexpectedOutput(silicon)(thread.start.failed:insufficient.permission,320)70 t3.start(self.dispose_rd)71 t4 = Thread(None, self.dispose_rd, args=())72 t4.start(self.dispose_rd)73 t5 = Thread(None, self.dispose_rd, args=())74 t5.start(self.dispose_rd)75 t6 = Thread(None, self.dispose_rd, args=())76 t6.start(self.dispose_rd)77 # joining to regain permission78 def a8(self, a: int) -> None:79 Requires(Rd(self.x))80 Ensures(Rd(self.x))81 t1 = Thread(None, self.void, args=())82 t1.start(self.void)83 t1.join(self.void)84 # joining to regain permission85 def a9(self, a: int) -> None:86 Requires(Rd(self.x))87 #:: ExpectedOutput(postcondition.violated:insufficient.permission)88 Ensures(Rd(self.x))89 t1 = Thread(None, self.dispose_rd, args=())90 t1.start(self.dispose_rd)91 t1.join(self.dispose_rd)92 # joining to regain permission93 def a10(self, a: int) -> None:94 Requires(Rd(self.x))95 Ensures(Implies(a == 3, Rd(self.x)))96 t1 = Thread(None, self.void, args=())97 t1.start(self.void)98 if 3 == a:99 t1.join(self.void)100 # finite loop of method calls, preserving rd(x)101 def a11(self) -> None:102 Requires(Rd(self.x))103 Ensures(Rd(self.x))104 i = 0 # type: int105 while i < 1000:106 Invariant(Rd(self.x))107 self.void()108 i += 1109 # 
forking dispose_rd in a loop110 def a12(self, a: int) -> None:111 Requires(Rd(self.x))112 Ensures(Wildcard(self.x))113 i = 0 # type: int114 while i < a:115 Invariant(Wildcard(self.x))116 # t1 = Thread(None, self.dispose_rd, args=())117 # t1.start(self.dispose_rd)118 i += 1119 # forking dispose_rd in a loop120 def a13(self, a: int) -> None:121 Requires(Rd(self.x))122 #:: ExpectedOutput(postcondition.violated:insufficient.permission)123 Ensures(Rd(self.x))124 i = 0 # type: int125 while i < a:126 Invariant(Wildcard(self.x))127 # t1 = Thread(None, self.dispose_rd, args=())128 # t1.start(self.dispose_rd)129 i += 1130 # calling dispose_rd in a loop131 def a14(self) -> None:132 Requires(Rd(self.x))133 Ensures(True)134 self.dispose_rd()135 i = 0 # type: int136 while i < 1000:137 Invariant(Wildcard(self.x))138 self.dispose_rd()139 i += 1140 # return unknown permission141 def a15(self) -> None:142 Requires(Rd(self.x))143 Ensures(Wildcard(self.x))144 self.dispose_rd()145 # rd in loop invariant146 def a16(self) -> None:147 Requires(Rd(self.x))148 Ensures(Wildcard(self.x))149 self.dispose_rd()150 i = 0 # type: int151 while i < 1000:152 Invariant(Rd(self.x))153 self.void()154 i += 1155 # rd in method contracts156 def a17(self) -> None:157 Requires(Rd(self.x))158 self.dispose_rd()159 self.a17()160 # multiple rd in method contracts161 def a18(self) -> None:162 Requires(Rd(self.x))163 Ensures(Rd(self.x))164 self.a18a()165 self.a18a()166 self.a18b()167 self.a18b()168 def a18a(self) -> None:169 Requires(Acc(self.x, 2*ARP()))170 Ensures(Acc(self.x, ARP()+ARP()))171 pass172 def a18b(self) -> None:173 Requires(Acc(self.x, ARP()+ARP()))174 Ensures(Acc(self.x, 2*ARP()))...
CustomerRegister.py
Source:CustomerRegister.py
import unittest
import requests
import ddt
from tools import ReadConfig, ReadExcl, ReadRedis
from common import DisposeCase, DisposeApi, DisposeHeader, DisposeReport, RunMain, DisposeRely, DisposeAssert, DisposeEnv
import os
import time

case_name = "CustomerRegister"

@ddt.ddt
class CustomerRegister(unittest.TestCase):

    @classmethod
    def setUpClass(self):
        self.runmethodhandle = RunMain.RunMethod()
        self.disposeapihandle = DisposeApi.DisposeApi(case_name)
        self.disposeheaderhandle = DisposeHeader.DisposeHeader()
        self.disposecasehandle = DisposeCase.DisposeCase(case_name)
        self.disposereporthandle = DisposeReport.DisposeReport(case_name)
        self.disposerelyhandle = DisposeRely.DisposeRely()
        self.disposeasserthandle = DisposeAssert.DisposeAssert()
        self.disposeenvhandle = DisposeEnv.DisposeEnv()

    @classmethod
    def tearDownClass(self):
        pass

    def setUp(self):
        time.sleep(2)

    def tearDown(self):
        pass

    # Data-driven: run only the cases whose "execute?" column is set to yes
    @ddt.data(*DisposeCase.DisposeCase(case_name).get_case_data())
    def test_CustomerRegister(self, data):
        # Log which case is running, for the test report
        print("Executing case: " + data['用例号'] + ", case name: " + data['用例名称'] + ", case API: " + data["请求API"])
        # Prepare the test environment
        self.disposeenvhandle.set_env(data)
        # Build the request URL
        url = self.disposeapihandle.get_url(data)
        # Build the request header
        header = self.disposeheaderhandle.get_header(data)
        # Build the request payload
        payload = self.disposecasehandle.get_payload(data)
        # Get the request method
        method = data['请求类型']
        # Call the API
        r = self.runmethodhandle.run_main(url, method, header, payload)
        # Get the expected result data
        expectedreport = self.disposereporthandle.get_report(data)
        # Assertions
        try:
            # Assert the response status code
            self.assertEqual(expectedreport['status_code'], r.status_code)
            if r.status_code == 200:
                # Assert the response data
                if "expecteddata" in expectedreport:
                    if r.text != '':
                        self.disposeasserthandle.AssertReport(expectedreport['expecteddata'], eval(r.text.replace('false', 'False').replace('true', 'True').replace('null', '""')))
                    else:
                        self.disposeasserthandle.AssertReport(expectedreport['expecteddata'], payload)
            elif r.status_code == 400:
                if "expecteddata" in expectedreport:
                    self.disposeasserthandle.AssertReport(expectedreport['expecteddata'], eval(r.text.replace('false', 'False').replace('true', 'True').replace('null', '""')))
        except AssertionError as e:
            print(e)
            raise
        finally:
            # Save dependency data
...
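The test above relies on ddt to expand one test method into one test per data row. A minimal sketch of that expansion with a hypothetical in-line case list and a fake request function; the real suite reads its cases from Excel via DisposeCase.get_case_data() and sends requests through RunMain:

# Minimal sketch of how @ddt.data turns a single test method into one test
# per data row, mirroring the pattern used in CustomerRegister above.
# CASES and fake_register are hypothetical stand-ins for the Excel reader
# and the HTTP call made with requests.
import unittest
import ddt

CASES = [
    {'case_id': '001', 'payload': {'name': 'alice'}, 'expected_status': 200},
    {'case_id': '002', 'payload': {'name': ''}, 'expected_status': 400},
]

def fake_register(payload):
    # Stand-in for the real API call; returns only a status code.
    return 200 if payload.get('name') else 400

@ddt.ddt
class CustomerRegisterSketch(unittest.TestCase):

    @ddt.data(*CASES)
    def test_register(self, data):
        status = fake_register(data['payload'])
        self.assertEqual(data['expected_status'], status)

if __name__ == '__main__':
    unittest.main()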
_collections.py
Source:_collections.py
# urllib3/_collections.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from collections import MutableMapping
from threading import RLock

try:  # Python 2.7+
    from collections import OrderedDict
except ImportError:
    from .packages.ordered_dict import OrderedDict

__all__ = ['RecentlyUsedContainer']

_Null = object()

class RecentlyUsedContainer(MutableMapping):
    """
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Callback which is called every time an item is evicted from the
        container, as ``dispose_func(value)``.
    """
    ContainerCls = OrderedDict

    def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func
        self._container = self.ContainerCls()
        self.lock = RLock()

    def __getitem__(self, key):
        # Re-insert the item, moving it to the end of the eviction line.
        with self.lock:
            item = self._container.pop(key)
            self._container[key] = item
            return item

    def __setitem__(self, key, value):
        evicted_value = _Null
        with self.lock:
            # Possibly evict the existing value of 'key'
            evicted_value = self._container.get(key, _Null)
            self._container[key] = value

            # If we didn't evict an existing value, we might have to evict the
            # least recently used item from the beginning of the container.
            if len(self._container) > self._maxsize:
                _key, evicted_value = self._container.popitem(last=False)

        if self.dispose_func and evicted_value is not _Null:
            self.dispose_func(evicted_value)

    def __delitem__(self, key):
        with self.lock:
            value = self._container.pop(key)

        if self.dispose_func:
            self.dispose_func(value)

    def __len__(self):
        with self.lock:
            return len(self._container)

    def __iter__(self):
        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')

    def clear(self):
        with self.lock:
            # Copy pointers to all values, then wipe the mapping
            # under Python 2, this copies the list of values twice :-|
            values = list(self._container.values())
            self._container.clear()

        if self.dispose_func:
            for value in values:
                self.dispose_func(value)

    def keys(self):
        with self.lock:
...
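The docstring above describes an LRU mapping that hands evicted values to dispose_func. A short usage sketch, assuming the module is importable as urllib3._collections as the header comment suggests:

# Usage sketch for RecentlyUsedContainer: with maxsize=2, inserting a third
# key evicts the least-recently-used entry and passes its value to dispose_func.
from urllib3._collections import RecentlyUsedContainer

disposed = []
cache = RecentlyUsedContainer(maxsize=2, dispose_func=disposed.append)

cache['a'] = 'conn-a'
cache['b'] = 'conn-b'
_ = cache['a']          # touch 'a' so 'b' becomes the least recently used
cache['c'] = 'conn-c'   # exceeds maxsize, so 'b' is evicted

print(disposed)         # ['conn-b']
print(len(cache))       # 2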
compositedisposable.py
Source:compositedisposable.py
...
            should_dispose = True
        else:
            self.disposables.append(item)

        if should_dispose:
            item.dispose()

    def remove(self, item):
        """Removes and disposes the first occurrence of a disposable from the
        CompositeDisposable."""
        if self.is_disposed:
            return

        should_dispose = False
        with self.lock:
            if item in self.disposables:
                self.disposables.remove(item)
                should_dispose = True

        if should_dispose:
            item.dispose()

        return should_dispose

    def dispose(self):
        """Disposes all disposables in the group and removes them from the
        group."""
        if self.is_disposed:
            return

        with self.lock:
            self.is_disposed = True
            current_disposables = self.disposables[:]
            self.disposables = []

        for disposable in current_disposables:
            disposable.dispose()

    def clear(self):
        """Removes and disposes all disposables from the CompositeDisposable,
        but does not dispose the CompositeDisposable."""
        with self.lock:
            current_disposables = self.disposables[:]
            self.disposables = []

        for disposable in current_disposables:
            disposable.dispose()

    def contains(self, item):
        """Determines whether the CompositeDisposable contains a specific
        disposable.

        Keyword arguments:
        item -- Disposable to search for

        Returns True if the disposable was found; otherwise, False"""
        return item in self.disposables

    def to_list(self):
        return self.disposables[:]

    def __len__(self):
        return len(self.disposables)

    @property
    def length(self):
        return len(self.disposables)
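The methods above add, remove, and dispose disposables as a group, and dispose late additions immediately once the group itself has been disposed. A short usage sketch; the import path shown is the RxPY 3.x layout and is an assumption, since the snippet does not show its own module path:

# Usage sketch for a CompositeDisposable like the one above: items added to
# the group are all released together by a single dispose() call, and items
# added after the group is disposed are released immediately.
# Import path assumed to be the RxPY 3.x one.
from rx.disposable import CompositeDisposable, Disposable

log = []
group = CompositeDisposable()
group.add(Disposable(lambda: log.append('first released')))
group.add(Disposable(lambda: log.append('second released')))

group.dispose()          # releases both items and marks the group disposed
print(log)               # ['first released', 'second released']

group.add(Disposable(lambda: log.append('late item released')))
print(log[-1])           # a late add is disposed immediately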
Using AI Code Generation
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import {unmountComponentAtNode} from 'react-dom';
ReactDOM.render(<App />, document.getElementById('root'));
unmountComponentAtNode(document.getElementById('root'));

import React from 'react';
import {useEffect} from 'react';
import Child from './Child';
export default function App(){
    useEffect(()=>{
        return ()=>{
            console.log('App component will unmount');
        };
    }, []);
    return(
    );
}

import React from 'react';
import {useEffect} from 'react';
export default function Child(){
    useEffect(()=>{
        return ()=>{
            console.log('Child component will unmount');
        };
    }, []);
    return(
    );
}

### 4.4.4. React.memo()
- React.memo() is a higher-order component that memoizes a component's rendered output and skips re-rendering when its props have not changed

import React from 'react';
import {useState} from 'react';
import Child from './Child';
export default function App(){
    const [count, setCount] = useState(0);
    return(
        <button onClick={()=>setCount(count+1)}>Increment</button>
    );
}

import React from 'react';
export default function Child(props){
    return(
        Child Component {props.name}
    );
}

import React from 'react';
export default React.memo(function Child(props){
    return(
        Child Component {props.name}
    );
});

### 4.4.5. React.lazy()
- React.lazy() is a function that takes a dynamic import function as an argument and returns a component whose code is loaded only when it is first rendered
Using AI Code Generation
import React from "react";
import ReactDOM from "react-dom";
import App from "./App";
const rootElement = document.getElementById("root");
ReactDOM.render(<App />, rootElement);
ReactDOM.unmountComponentAtNode(rootElement);
Using AI Code Generation
import {createStore, applyMiddleware, compose} from 'redux';
import {rootReducer} from './reducers';
export const store = createStore(
    compose(applyMiddleware())
);

import {store} from './test';
import {Provider} from 'react-redux';
const App = () => {
    return (
        <Provider store={store}>
    );
};
export default App;