Best Python code snippet using tavern
other_language_desc.py
Source:other_language_desc.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: 温赫…
'''
Description information for other language sites.
'''
from django.db.models import Q
from sysproduct.models import SpuDescriptionLanguage
from pro_db_models.models.market_channel_models import SysMarketProductMap
from order_statistic.models import SkuDaysStatistic,SkuStatistic
from sale_amazon.models import Amazonstore, Amazonsummary, AmazonCharger, AmazonAccount, amazonasintrack
from maindefine import COUNTRY
#from django.db import models
import datetime
from utils import misc
from sysproduct.platform_info import *
from django.db.models import Count, Avg, Max, Min, Sum
import enum
from sysproduct.models import *
from warehouse.models import Warehouse,SkuWarehouseInventory
import pytz
from utils import misc
import bottlenose
# import logging
# bottlenose.api.log.level = logging.DEBUG
#
# handle = logging.StreamHandler()
# handle.setLevel(logging.DEBUG)
#
# formatter = logging.Formatter(
#     '%(asctime)s - %(funcName)s - %(lineno)d- %(message)s')
# handle.setFormatter(formatter)
#
# if not bottlenose.api.log.handlers:
#     bottlenose.api.log.addHandler(handle)
from django.db import models
from sale_amazon.models.listing import *
from amazon.api import AmazonAPI
from sale_amazon.models import Amazonstore, amazonbrand
from utils import misc


class _Report_Info(str):
    # str subclass: each instance carries a report message, a running total
    # and a per-language data dict.
    c_Code = 0

    def __new__(cls, msg):
        cls.c_Code += 1
        codeid = cls.c_Code
        self = str.__new__(cls, codeid)
        self.m_CodeID = codeid
        self.m_Msg = msg
        self.m_Num = 0
        self.m_Data = {}
        return self

    @property
    def num(self):
        return self.m_Num

    @num.setter
    def num(self, value):
        self.m_Num = value

    @property
    def data(self):
        return self.m_Data

    # save the report
    # total number of SKUs in stock
    # number of SKUs that can be listed
    # total number of ASINs saved
    # number with empty descriptions
    # number with text shorter than 30 characters
    # number with all-English descriptions
    # number of SPUs after merging
    #
    # Japan site:
    # number of descriptions that contain no Japanese
    # number that need translation


class REPORTINFO(_Report_Info, enum.Enum):
    ALL_SYS_SKU = "Total number of SPUs in the system."
    All_SKU_REDUCE_NOSTOCK = "Remaining count after removing SPUs with no stock and not in purchasable status"
    All_SKU_REDUCE_NOSALE = "Remaining count after removing SPUs with zero sales"
    #ALL_ASINS = "Total number of ASINs in the system"
    #ALL_ASINS_CANPUSH = "Number of ASINs in listable status"
    ASIN_DOWN = "Total number of ASINs fetched"
    ALL_SPU = "Total number of SPUs to import"
    ALL_SPU_DOWN = "Total number of SPUs downloaded"
    ALL_SPU_REDUCE_NO_DESC = "Remaining count after removing SPUs with empty descriptions"
    DESCLEN_LT30 = "SPUs with description length under 30"
    SPU_INSERT = "SPUs inserted"
    SPU_UPDATE = "SPUs updated"
    US_SPU_COUNT = "Total SPUs on the US site"
    US_SPU_REDUCE_NOSTOCK = "Remaining SPUs after removing those with no stock and not purchasable"
    US_SPU_REDUCE_DOWN = "After removing data already downloaded"
    US_SPU_NO_EN_DESC = "Remaining SPUs with an English description"
    US_SPU_ERR_TRANS = "Number of failed translations"
    TITLE_LENS = "Title length"
    FETURE_LENS = "Total feature length"
    DESC_LENS = "Total description text length"
    HASTRANSD_TIME = "Duplicate translations"
    AMA_PRODUCTS_PUSHED = "ASINs already mapped (%s), SKUs (%s), SPUs (%s)"
    AMA_PRODUCTS_DOWNED = "After filtering out ASINs that could not be fetched: ASINs (%s), SKUs (%s), SPUs (%s)"
    AMA_PRODUCTS_REDUCE = "After filtering out descriptions shorter than 30: ASINs (%s), SKUs (%s), SPUs (%s)"
    AMA_PRODUCTS_SAVED = "Number of SPUs finally imported into the system"
    ALL_PRODUCTS = "Total number of SPUs in the system"
    SALED_PRODUCTS = "After removing products with no sales in the system"
    SALED_PRODUCTS_NOTUSED = "After removing SPUs already on the Japan site"
    SALED_PRODUCTS_DESCGT30 = "After removing SPUs with description length under 30"
    SALED_PRODUCTS_TRAN_TIME = "Number of SPUs needing translation"
    SALED_PRODUCTS_TRAN_COUNT = "Number of characters needing translation"
    HASSTOCK_PRODUCTS = "After removing SPUs with no stock in the system"
    HASSTOCK_PRODUCTS_NOTUSED = "After removing SPUs already on the Japan site"
    HASSTOCK_PRODUCTS_DESCGT30 = "After removing SPUs with description length under 30"
    HASSTOCK_PRODUCTS_TRAN_TIME = "Number of SPUs needing translation"
    HASSTOCK_PRODUCTS_TRAN_COUNT = "Number of characters needing translation"
    PUSHABLE_PRODUCTS = "After removing SPUs with no stock in the system"
    PUSHABLE_PRODUCTS_NOSALE = "After removing those with no sales, remaining SPUs"
    PUSHABLE_PRODUCTS_NOTUSED = "After removing SPUs already on the Japan site"
    PUSHABLE_PRODUCTS_DESCGT30 = "After removing SPUs with description length under 30"
    PUSHABLE_PRODUCTS_TRAN_TIME = "Number of SPUs needing translation"
    PUSHABLE_PRODUCTS_TRAN_COUNT = "Number of characters needing translation"

# 1 ASINs, SKUs, SPUs already mapped
# 2 after filtering out ASINs that could not be fetched: ASIN, SKU, SPU counts
# 3 after filtering descriptions shorter than 30: ASIN, SKU, SPU counts
# 4 SPUs finally imported into the system
# 5 SPUs of products with stock or sales in the system
# 6 SPUs needing translation,
# 7 characters needing translation,
# 8 SPUs of products that can be listed,
# 9 SPUs needing translation
# 10 characters needing translation

    @classmethod
    def NewReport(cls):
        alldata = []
        # items = [
        #     cls.AMA_PRODUCTS_PUSHED,
        #     cls.AMA_PRODUCTS_DOWNED,
        #     cls.AMA_PRODUCTS_REDUCE,
        # ]
        # keys = [
        #     "asin",
        #     "sku",
        #     "spu",
        #
        # ]
        # for item in items:
        #     info = item.m_Msg%(item.data["asin"],item.data["sku"],item.data["spu"])
        #     alldata.append(info)
        # #cls.AMA_PRODUCTS_REDUCE,
        # #alldata.append(cls.AMA_PRODUCTS_REDUCE.GetValue())
        # alldata.append(cls.AMA_PRODUCTS_SAVED.GetValue())
        # alldata.append("-------------")
        #
        #
        # items = [
        #     cls.ALL_PRODUCTS,
        #     cls.SALED_PRODUCTS,
        #     cls.SALED_PRODUCTS_NOTUSED,
        #     cls.SALED_PRODUCTS_DESCGT30,
        #     cls.SALED_PRODUCTS_TRAN_TIME,
        #     cls.SALED_PRODUCTS_TRAN_COUNT,
        #
        # ]
        # for item in items:
        #     print id(item),3132123,item.num
        #     alldata.append(item.GetValue())
        #
        # alldata.append("-------------")
        #
        #
        # items = [
        #     cls.ALL_PRODUCTS,
        #     cls.HASSTOCK_PRODUCTS,
        #     cls.HASSTOCK_PRODUCTS_NOSALE,
        #     # cls.SALED_PRODUCTS_NOSTOCK,
        #     cls.HASSTOCK_PRODUCTS_DESCGT30,
        #     cls.HASSTOCK_PRODUCTS_TRAN_TIME,
        #     cls.HASSTOCK_PRODUCTS_TRAN_COUNT,
        # ]
        # for item in items:
        #     alldata.append(item.GetValue())
        #
        # alldata.append("-------------")
        items = [
            cls.ALL_PRODUCTS,
            cls.PUSHABLE_PRODUCTS,
            cls.PUSHABLE_PRODUCTS_NOSALE,
            cls.PUSHABLE_PRODUCTS_NOTUSED,
            # cls.SALED_PRODUCTS_NOSTOCK,
            cls.PUSHABLE_PRODUCTS_DESCGT30,
            cls.PUSHABLE_PRODUCTS_TRAN_TIME,
            cls.PUSHABLE_PRODUCTS_TRAN_COUNT,
        ]
        for item in items:
            alldata.append(item.GetValue())
        # alldata.append("-------------")
        return alldata

    @classmethod
    def Clear(cls):
        for info in cls.GetItems():
            info.num = 0
            info.data.clear()
#            print info

    @classmethod
    def GetItems(cls):
        return [
            cls.ALL_SYS_SKU,
            cls.All_SKU_REDUCE_NOSTOCK,
            #cls.All_SKU_REDUCE_NOSALE,
            #cls.ASIN_DOWN,
            cls.ALL_SPU_DOWN,
            #cls.ALL_SPU_REDUCE_NO_DESC,
            cls.DESCLEN_LT30,
            cls.SPU_INSERT,
            cls.SPU_UPDATE,
        ]

    @classmethod
    def DoReport1(cls):
        lans = ["jp", ]
        items = [
            cls.US_SPU_COUNT,
            cls.US_SPU_REDUCE_NOSTOCK,
            cls.US_SPU_REDUCE_DOWN,
            cls.US_SPU_NO_EN_DESC,
            #cls.US_SPU_ERR_TRANS,
            cls.TITLE_LENS,
            cls.FETURE_LENS,
            cls.DESC_LENS,
            #cls.HASTRANSD_TIME,
            # cls.SPU_INSERT,
            # cls.SPU_UPDATE,
        ]
        #lans = ["es",]
        # for info in items:
        #     info.num = sum(info.data.values())
        #
        data = []
        # for info in items:
        #     data.append(info.GetValue())
        # data.append("----------------")
        for lan in lans:
            for info in items:
                data.append(info.GetValue(lan))
            data.append("----------------")
        data.append(REPORTINFO.HASTRANSD_TIME.GetValue("次数"))    # key: "count of repeats"
        # NOTE: HASTRANSD_COUNT is not defined on this enum; only HASTRANSD_TIME is.
        data.append(REPORTINFO.HASTRANSD_COUNT.GetValue("字符"))   # key: "characters"
        return data

    @classmethod
    def DoReport(cls):
        lans = ["de", "jp", "es", "it", "fr"]
        #lans = ["es",]
        for info in cls.GetItems():
            info.num = sum(info.data.values())
        data = []
        for info in cls.GetItems():
            data.append(info.GetValue())
        data.append("----------------")
        for lan in lans:
            for info in cls.GetItems():
                data.append(info.GetValue(lan))
            data.append("----------------")
        return data
        #data.append()

    def __str__(self):
        return self.GetValue()

    def GetValue(self, lan=""):
        if lan == "all":
            alldata = []
            for key, value in self.m_Data.items():
alldata.append("%s<%s>:%s"%(self.m_Msg,key,value))265 return "\n".join(alldata)266 elif lan:267 return "%s<%s>:%s"%(self.m_Msg,lan,self.m_Data.get(lan,""))268 else:269 return "%s:%s"%(self.m_Msg,self.m_Num)270def GetMarketID(lan):271 dn = {272 "de": 8,273 "jp": 5,274 "es": 10,275 "it": 11,276 "fr": 12,277 "us": 2,278 }279 market_id = dn.get(lan,"")280 return market_id281# class CReport(object):282# def __init__(self):283# self.m_All_Sys_Sku_Count = 0#åºåæ»skuæ°284# self.m_All_Sys_Sku_Count_Reduce_NoSale = 0# éé为0çsku285# self.m_All_Sys_Sku_Count_Reduce_NoStock = 0#æåºåskuæå¯éè´286# self.m_All_Asin_Count = 0287class CSysAsin(SysMarketProductMap):288 # market_product = models.ForeignKey(MarketProductsCandidates,null=True,db_column='market_product_id',db_constraint=False,on_delete=models.DO_NOTHING, help_text='å¸åºäº§åid')289 # item_id = models.CharField(max_length=50,null=True,db_index=True,help_text='å个平å°ç产åç¼ç ')290 # sku = models.ForeignKey(SkuProduct,db_column='sku_id' ,related_name="sku_product_map",null=True, help_text='对åºçsku产å')291 # market = models.ForeignKey(Market, related_name="market_map",null=True)292 # # 产åæ å°å å
¥ç³»ç»ä¿¡æ¯293 # create_time = models.DateTimeField(default=timezone.now)294 # create_user = models.ForeignKey(settings.AUTH_USER_MODEL,db_constraint=False, null=True, related_name='product_map_create_user')295 # update_time = models.DateTimeField(default=timezone.now)296 # update_user = models.ForeignKey(settings.AUTH_USER_MODEL,db_constraint=False, null=True, related_name='product_map_update_user')297 # default_map = models.BooleanField(default=False,help_text='以第ä¸æ¬¡æ·»å ç为é»è®¤æ å°å
³ç³»,主è¦é对åä¸å¸åº,ä¸ä¸ªskuæå¤ä¸ªæ å°çæ
åµ')298 class Meta:299 proxy = True300 @classmethod301 def Bluk_Set_Category(cls):302 misc.CLog.Clear()303 with open('amazon_us.csv', 'r') as f:304 alldata = f.read().splitlines()305 misc.CLog.SetSpace(100,len(alldata)-1)306 for relationstr in alldata[1:]:307 data = relationstr.split(",")308 misc.CLog.PrintSpace("åå
¥åç±»")309 try:310 asin,category_id = data311 except:312 #print data,22222313 continue314 info = cls.objects.filter(item_id=asin).filter(market=2).first()315 if info:316 info.category_id = category_id317 info.save()318 @classmethod319 def Bluk_Set_Pro_Category(cls):320 class _tmp_table(models.Model):321 id = models.AutoField(primary_key=True)322 category_id = models.CharField(max_length=20)323 asin = models.CharField(max_length=20)324 class Meta:325 managed = False326 db_table = 'channel_product_map_forupdate'327 def _create_tmp():328 from django.db import connection, transaction329 cursor = connection.cursor()330 sql = """331 DROP TABLE IF EXISTS `channel_product_map_forupdate`;332 CREATE TABLE `channel_product_map_forupdate` (333 `id` int(11) NOT NULL AUTO_INCREMENT,334 `asin` varchar(16) DEFAULT NULL DEFAULT '',335 `category_id` varchar(16) NOT NULL DEFAULT '',336 PRIMARY KEY (`id`),337 KEY `channel_product_map_category_id` (`category_id`)338 ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;339 """340 #341 cursor.execute(sql)342 def _drop_tmp():343 from django.db import connection, transaction344 cursor = connection.cursor()345 sql = """346 DROP TABLE `channel_product_map_forupdate`;347 """348 cursor.execute(sql)349 def _update_relation():350 from django.db import connection, transaction351 cursor = connection.cursor()352 sql = """353 update channel_product_map_forupdate s,channel_product_map t set t.category_id = s.category_id where s.asin = t.item_id and t.market_id = 2;354 """355 cursor.execute(sql)356 _create_tmp()357 alldata1 = []358 misc.CLog.Clear()359 with open('upload/amazon/amazon_us.csv', 'r') as f:360 alldata = f.read().splitlines()361 misc.CLog.SetSpace(10000,len(alldata)-1)362 for relationstr in alldata[1:]:363 data = relationstr.split(",")364 misc.CLog.PrintSpace("读å
¥å类信æ¯")365 try:366 asin,category_id = data367 except:368 #print data,22222369 continue370 info = _tmp_table()371 info.asin = asin372 info.category_id = category_id373 #print info.asin,info.category_id374 alldata1.append(info)375 misc.CLog.Print("å¼å§åå
¥")376 _tmp_table.objects.bulk_create(alldata1)377 misc.CLog.Print("åå
¥å®æ¯")378 _update_relation()379 misc.CLog.Print("æ´æ°å
³ç³»")380 _drop_tmp()381 misc.CLog.Print("æ¸
ç临æ¶è¡¨")382 @classmethod383 def GetAsinList(cls,lan):384 oQuery = cls.GetAsinListQuery(lan)385 alldata = []386 for data in oQuery.all().values("item_id"):387 alldata.append(data["item_id"])388 return alldata389 @classmethod390 def GetAsinListQuery(cls,lan):391 market_id = GetMarketID(lan)392 oQuery = cls.objects.filter(market=market_id)393 ct = cls._GetSpuCount(oQuery.values("sku_id"))394 REPORTINFO.ALL_SYS_SKU.data[lan] = ct395 hasstore_products = SkuWarehouseInventory.objects.values("sku_id").annotate(allct=Sum('on_hand_qty')).filter(396 allct__gt=1).values("sku_id")397 query_select = Q(purchase_ok=1) | Q(id__in=hasstore_products)398 allproducts = SkuProduct.objects.filter(query_select).values("id")399 oQuery = oQuery.filter(sku__in=allproducts)400 ct = cls._GetSpuCount(oQuery.values("sku_id"))401 REPORTINFO.All_SKU_REDUCE_NOSTOCK.data[lan] = ct402 #REPORTINFO.All_SKU_REDUCE_NOSTOCK.data[lan]=oQuery.count()403 #SaledProducts = SkuDaysStatistic.objects.values("sku").filter(total_qty__gt=0).values("sku")404 return oQuery405 @classmethod406 def _GetSpuCount(cls,sku_list):407 ct = SkuProduct.objects.filter(id__in=sku_list).values("spu_id").distinct().count()408 return ct409 # @classmethod410 # def GetSpuList_ExtDone(cls,lan):411 #412 # market_id = GetMarketID("us")413 # oQuery = cls.objects.filter(market=market_id)414 # ct = cls._GetSpuCount(oQuery.values("sku_id"))415 # REPORTINFO.US_SPU_COUNT.data[lan] = ct416 #417 # hasstore_products = SkuWarehouseInventory.objects.values("sku_id").annotate(allct=Sum('on_hand_qty')).filter(418 # allct__gt=1).values("sku_id")419 # query_select = Q(purchase_ok=1) | Q(id__in=hasstore_products)420 # allproducts = SkuProduct.objects.filter(query_select).values("id")421 # oQuery = oQuery.filter(sku__in=allproducts)422 #423 # ct = cls._GetSpuCount(oQuery.values("sku_id"))424 # REPORTINFO.US_SPU_REDUCE_NOSTOCK.data[lan] = ct425 #426 # # all_spu_list = SkuProduct.objects427 # # all_spu_list = all_spu_list.filter(id__in=oQuery.values("sku_id"))428 #429 # done_spu_list = CSpuDesc.GetDoned_Spu_List(lan)430 # all_sku_list = SkuProduct.objects.exclude(spu__in=done_spu_list).values("id")431 # oQuery = oQuery.filter(sku__in=all_sku_list)432 #433 # ct = cls._GetSpuCount(oQuery.values("sku_id"))434 # REPORTINFO.US_SPU_REDUCE_DOWN.data[lan] = ct435 # #print oQuery.query436 # #print "--------"437 # #print SkuProduct.objects.exclude(id__in=oQuery.values_list("sku",flat=True)).values_list("spu",flat=True).distinct().query438 # return SkuProduct.objects.filter(id__in=oQuery.values_list("sku",flat=True)).values_list("spu",flat=True).distinct()439 @classmethod440 def GetSpuList_ExtDone(cls,lan):441 # market_id = GetMarketID("us")442 # oQuery = cls.objects.filter(market=market_id)443 oQuery = SkuProduct.objects.filter()444 ct = cls._GetSpuCount(oQuery.values("id"))445 REPORTINFO.US_SPU_COUNT.data[lan] = ct446 hasstore_products = SkuWarehouseInventory.objects.values("sku_id").annotate(allct=Sum('on_hand_qty')).filter(447 allct__gt=1).values("sku_id")448 query_select = Q(purchase_ok=1) | Q(id__in=hasstore_products)449 allproducts = SkuProduct.objects.filter(query_select).values("id")450 oQuery = oQuery.filter(id__in=allproducts)451 ct = cls._GetSpuCount(oQuery.values("id"))452 REPORTINFO.US_SPU_REDUCE_NOSTOCK.data[lan] = ct453 # all_spu_list = SkuProduct.objects454 # all_spu_list = all_spu_list.filter(id__in=oQuery.values("sku_id"))455 done_spu_list = CSpuDesc.GetDoned_Spu_List(lan)456 all_sku_list = SkuProduct.objects.exclude(spu__in=done_spu_list).values("id")457 oQuery = 
oQuery.filter(id__in=all_sku_list)458 ct = cls._GetSpuCount(oQuery.values("id"))459 REPORTINFO.US_SPU_REDUCE_DOWN.data[lan] = ct460 #print oQuery.query461 #print "--------"462 #print SkuProduct.objects.exclude(id__in=oQuery.values_list("sku",flat=True)).values_list("spu",flat=True).distinct().query463 return SkuProduct.objects.filter(id__in=oQuery.values_list("id",flat=True)).values_list("spu",flat=True).distinct()464class CSpuDesc(SpuDescriptionLanguage):465 #466 # spu_id = models.IntegerField()467 # language = models.SmallIntegerField()468 # description = models.TextField()469 # feature = models.TextField()470 # attr_detail = models.TextField()471 # product_list = models.CharField(max_length=300)472 # create_time = models.DateTimeField()473 # write_time = models.DateTimeField()474 # version = models.IntegerField()475 # source = models.IntegerField()476 class Meta:477 proxy = True478 @classmethod479 def DoSave(cls):480 pass481 @classmethod482 def GetDoned_Spu_List(cls,lan):483 oQuery = cls.objects.filter(language=lan)484 #oQuery = oQuery.filter(source=SOURCE.AMAZON_INFO)485 return oQuery.values("spu_id")486 @classmethod487 def Bluk_Add(cls,alldata,lan,source):488 REPORTINFO.SPU_INSERT.data[lan] = 0489 REPORTINFO.SPU_UPDATE.data[lan] = 0490 for data in alldata:491 oQuery = cls.objects.filter(spu_id=data["spu_id"]).filter(language=lan)492 #print oQuery.query493 info = oQuery.first()494 if not info:495 info = cls()496 info.create_time = datetime.utcnow().replace(tzinfo=pytz.utc)497 info.spu_id = data["spu_id"]498 info.language = lan499 REPORTINFO.SPU_INSERT.data[lan] += 1500 else:501 REPORTINFO.SPU_UPDATE.data[lan]+=1502 continue#åªæå
¥ä¸æ´æ°503 info.spu_id = data["spu_id"]504 info.write_time = datetime.utcnow().replace(tzinfo=pytz.utc)505 #if not info.version:506 # info.version = 0507 info.version = 1508 info.title = data["title"]509 info.source = source510 info.feature = data["feature"]511 info.description = data["description"]512 info.save()513class SOURCE(object):514 AMAZON_INFO = 0 #äºé©¬éå°è¯ç§ç«ç¹515 AMAZON_US_INFO_TRANS = 1 #äºé©¬éç¾å½ç«èªå¨ç¿»è¯ä¿¡æ¯516 LISTING = 2 #listingæ°æ®æå517# def LoadAmazon_Info_Path(path):518# import os519# '''520# 便åç®å½521# :param path:522# :return:523# '''524# source = SOURCE.AMAZON_INFO525# rootpath = "mount/amazon_info"526# for filename in os.listdir(rootpath):527# alldata = []528def DownProductInfo():529 '''530 ä¸è½½ä¸ä¸ªäº§å531 :return: 532 '''533g_Asin2Spu_Cache = {}534def Asin2Spu(asin):535 global g_Asin2Spu_Cache536 '''537 asin转spu538 :param asin: 539 :return: 540 '''541 if asin in g_Asin2Spu_Cache:542 return g_Asin2Spu_Cache[asin]543 info = CSysAsin.objects.filter(item_id=asin).values("sku_id").first()544 spu_id = None545 if info:546 sku_id = info["sku_id"]547 info = SkuProduct.objects.filter(id=sku_id).values("spu_id").first()548 if info:549 spu_id = info["spu_id"]550 g_Asin2Spu_Cache[asin]=spu_id551 return spu_id552g_Asin2SpuSku_Cache = {}553def Asin2SpuSku(asin):554 global g_Asin2Spu_Cache555 if asin in g_Asin2SpuSku_Cache:556 return g_Asin2SpuSku_Cache[asin]557 info = CSysAsin.objects.filter(item_id=asin).values("sku_id").first()558 spu_id = None559 sku_id = None560 if info:561 sku_id = info["sku_id"]562 info = SkuProduct.objects.filter(id=sku_id).values("spu_id").first()563 if info:564 spu_id = info["spu_id"]565 g_Asin2SpuSku_Cache[asin]=spu_id,sku_id566 return spu_id,sku_id567def ImportXls(filename):568 '''569 å¼å
¥ä¸ä¸ªæ°æ®æ件570 :param filename: 571 :return: 572 '''573 alldata = []574 import xlrd575 bk = xlrd.open_workbook(filename)576 shxrange = range(bk.nsheets)577 try:578 sh = bk.sheet_by_name("Sheet1")579 except:580 return [],"æ件æ¾ä¸å°<Sheet1>å页"581 # è·åè¡æ°582 nrows = sh.nrows583 # è·ååæ°584 ncols = sh.ncols585 print "nrows %d, ncols %d" % (nrows, ncols)586 # è·å第ä¸è¡ç¬¬ä¸åæ°æ®587 cell_value = sh.cell_value(1, 1)588 row_list = []589 # è·ååè¡æ°æ®590 for row in range(1, nrows):591 row_data = sh.row_values(row)592 row_list.append(row_data)593 return row_list594class _CTime(object):595 c_LastTime = 0596 @classmethod597 def Clear(cls):598 import time599 cls.c_LastTime = time.time()600 @classmethod601 def PrintSpace(cls,msg):602 import time603 now = time.time()604 print msg,"---",now-cls.c_LastTime605 #cls.c_LastTime = time.time()606def main1():607 _CTime.Clear()608 lans = ["jp", ]609 transcache = {}610 for lan in lans:611 REPORTINFO.TITLE_LENS.data[lan] = 0612 REPORTINFO.FETURE_LENS.data[lan] = 0613 REPORTINFO.DESC_LENS.data[lan] = 0614 REPORTINFO.HASTRANSD_TIME.data["次æ°"] = 0615 REPORTINFO.HASTRANSD_TIME.data["å符"] = 0616 spu_list = CSysAsin.GetSpuList_ExtDone(lan)617 alldata = []618 REPORTINFO.US_SPU_NO_EN_DESC.data[lan] = len(spu_list)619 REPORTINFO.US_SPU_ERR_TRANS.data[lan] = 0620 ct = 0621 allct = len(spu_list)622 for spu_id in spu_list:623 product = SpuProduct.objects.filter(id = spu_id).values("title").first()624 title = product["title"]625 ct += 1626 if ct %1000==0:627 _CTime.PrintSpace("%s/%s"%(ct,allct))628 extinfo =SpuDescriptionEn.objects.filter(spu=spu_id).values().first()629 if not extinfo:630 REPORTINFO.US_SPU_NO_EN_DESC.data[lan] -= 1631 continue632 description = extinfo["description"]633 feature = extinfo["feature"]634 texts = [title,description,feature]635 if title not in transcache:636 transcache[title] = 1637 REPORTINFO.TITLE_LENS.data[lan] += len(title)638 else:639 REPORTINFO.HASTRANSD_TIME.data["次æ°"] +=1640 REPORTINFO.HASTRANSD_TIME.data["å符"] += len(title)641 if feature not in transcache:642 transcache[feature] = 1643 REPORTINFO.FETURE_LENS.data[lan] += len(feature)644 else:645 REPORTINFO.HASTRANSD_TIME.data["å符"] += len(feature)646 REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 1647 if description not in transcache:648 transcache[description] = 1649 REPORTINFO.DESC_LENS.data[lan] += len(description)650 else:651 REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 1652 REPORTINFO.HASTRANSD_TIME.data["å符"] += len(description)653 #REPORTINFO.DESC_LENS.data[lan] += len(description)654 #REPORTINFO.HASTRANSD_TIME = += len(title)655 #dn = misc.EN2JP(texts=texts)656 # newdata = {}657 # try:658 # newdata["title"] = dn[title]659 # newdata["description"] = dn[description]660 # newdata["feature"] = dn[feature]661 # except:662 # REPORTINFO.US_SPU_ERR_TRANS.data[lan] += 1663 # continue664 # try:665 # newdata["feature"] = dn[feature]666 # except:667 # newdata["feature"] = ""668 #newdata["spu_id"] = spu_id669 #alldata.append(newdata)670 #CSpuDesc.Bluk_Add(alldata, lan, SOURCE.AMAZON_US_INFO_TRANS)671 print "\n".join(REPORTINFO.DoReport1())672def DownDesc():673 def _GetProduct(lan):674 dn = {675 "de": 8,676 "jp": 5,677 "es": 10,678 "it": 11,679 "fr": 12,680 }681 market_id = dn[lan]682 products = SysMarketProductMap.objects.filter(market_id=market_id).values("item_id").all()683 for product in products:684 yield product["item_id"]685 amazon_dict = {}686 # amazon_dict["it"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu",687 # "oucher308-22", region="IT", Timeout=5, 
MaxQPS=1)oucher3@163.com688 #amazon_dict["it"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu", "oucher308-22",689 # region="IT", Timeout=5, MaxQPS=1)690 #amazon_dict["it"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher309-21",691 # region="IT", Timeout=5, MaxQPS=1)692 # amazon_dict["it"] = AmazonAPI("AKIAIADF3FDWG2EADWCA", "hCenHr3RzMYRsNmbXSiyywVknBS58AhN/llkujT", "oucher309-21",693 # region="IT", Timeout=5, MaxQPS=1)#43800052@qq.com694#########################################695 # amazon_dict["it"] = AmazonAPI("AKIAIGEGIKE7YTTY4GRQ", "ccF+1I42wYE5bs2TUmi+ghJXcSTqoS3Nn6lgbjr6", "oucher33-21",696 # region="IT", Timeout=5, MaxQPS=1)697 amazon_dict["it"] = AmazonAPI("AKIAINRPVKEXVQ65GN6A", "qo4GbATKZqDBpsVrFM6v3p/Y0DA5D/NogrCkyRIy", "oucher3-21",698 region="IT", Timeout=5, MaxQPS=1)699 amazon_dict["es"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher0e-21",700 region="ES", Timeout=5, MaxQPS=1)701 amazon_dict["fr"] = AmazonAPI("AKIAISQ5EL7RGVOIKCPQ", "KWZaLFPu3VY6nWKKBLldEKqTQzuFT2ahdr3VHNTy", "oucher3-21",702 region="FR", Timeout=5, MaxQPS=1)703 #amazon_dict["fr"] = AmazonAPI("AKIAJTJ6H6FMQPM5TSUQ", "JuVg/qCoJsQq9y3LHQnhmHCxb4VfqVAFMEBb+gih", "oucher3-21",704 # region="FR", Timeout=5, MaxQPS=1)705 #706 # amazon_dict["fr"] = AmazonAPI("AKIAII4O6ZBEJ6YJFX2Q", "q3euQaWkYlJQChvxc7/pblwq0F9jWWMbzv479a+/", "home0d81-21",707 # region="FR", Timeout=5, MaxQPS=1)708 amazon_dict["de"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher0e-21",709 region="DE", Timeout=5, MaxQPS=1)710 amazon_dict["jp"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu",711 "oucher308-22", region="JP", Timeout=5, MaxQPS=1)712 import socket713 socket.setdefaulttimeout(5)714 def _GetProducts(lan):715 ct = 0716 ct1 = 0717 alldata = []718 alldata.append(["asin", "title", "featurestr", "desc", "publisher", "detail_page_url"])719 amazon = amazon_dict[lan]720 product = _GetProduct(lan)721 flag = True722 while flag:723 items = []724 for i in range(10):725 try:726 asin = product.next()727 except:728 flag = False729 break730 if asin:731 items.append(asin)732 ct += 1733 if not items:734 break735 itemstr = ",".join(items)736 _CTime.PrintSpace(",".join(("åæ<%s>"%lan, "%s/%s"%(ct, ct1), itemstr)))737 data = []738 import time739 time.sleep(1)740 try:741 amzproducts = amazon.lookup(ItemId=itemstr, ResponseGroup="Medium", Timeout=5)742 except Exception as e:743 print e744 continue745 if type(amzproducts) == list:746 pass747 else:748 amzproducts = [amzproducts]749 for amzproduct in amzproducts:750 ct1 += 1751 asin = amzproduct.asin752 title = amzproduct.title753 featurestr = ",".join(amzproduct.features)754 desc = amzproduct.editorial_review755 publisher = amzproduct.publisher756 detail_page_url = amzproduct.detail_page_url757 #data = [asin, title, featurestr, desc, publisher, detail_page_url]758 data = [asin, title, featurestr, desc, publisher, detail_page_url]759 alldata.append(data)760 _CTime.PrintSpace("æ§è¡ç»æ")761 misc.save_data_xls(alldata, "amazon_%s.xls" % lan)762 misc.save_data_csv(alldata, "amazon_%s.csv" % lan)763 return ct, ct1764 # print product.asin_id,amzproduct.editorial_review765 # newdata[product.asin_id] = amzproduct.editorial_review766 # with open('./detail.txt', 'a') as f:767 # f.write(json_encode(newdata, indent=4))768 # ct +=1769 alldata = []770 allct = 0771 allct1 = 1772 for lan in amazon_dict:773 ct, ct1 = _GetProducts(lan)774 allct += ct775 
allct1 += ct1776 alldata.append(("<%s>ä¸è½½äº%s/%s" % (lan, ct1, ct)))777 _CTime.PrintSpace("ä¸è½½å®æ¯")778 print "å
±å®æ:%s/%s" % (allct1, allct)779 print "\n".join(alldata)780def DownNodeIDs():781 def _GetProduct(lan):782 dn = {783 "us": 2,784 }785 market_id = dn[lan]786 products = SysMarketProductMap.objects.filter(market_id=market_id).values("item_id").all()787 for product in products:788 yield product["item_id"]789 amazon_dict = {}790 # amazon_dict["it"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu",791 # "oucher308-22", region="IT", Timeout=5, MaxQPS=1)oucher3@163.com792 #amazon_dict["it"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu", "oucher308-22",793 # region="IT", Timeout=5, MaxQPS=1)794 #amazon_dict["it"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher309-21",795 # region="IT", Timeout=5, MaxQPS=1)796 # amazon_dict["it"] = AmazonAPI("AKIAIADF3FDWG2EADWCA", "hCenHr3RzMYRsNmbXSiyywVknBS58AhN/llkujT", "oucher309-21",797 # region="IT", Timeout=5, MaxQPS=1)#43800052@qq.com798#########################################799 # amazon_dict["it"] = AmazonAPI("AKIAIGEGIKE7YTTY4GRQ", "ccF+1I42wYE5bs2TUmi+ghJXcSTqoS3Nn6lgbjr6", "oucher33-21",800 # region="IT", Timeout=5, MaxQPS=1)801 # amazon_dict["es"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher0e-21",802 # region="ES", Timeout=5, MaxQPS=1)803 # # amazon_dict["fr"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher3-21",804 # # region="FR", Timeout=5, MaxQPS=1)805 # amazon_dict["fr"] = AmazonAPI("AKIAJTJ6H6FMQPM5TSUQ", "JuVg/qCoJsQq9y3LHQnhmHCxb4VfqVAFMEBb+gih", "oucher3-21",806 # region="FR", Timeout=5, MaxQPS=1)807 #808 # amazon_dict["us"] = AmazonAPI("AKIAJGLFK6P37DXE4GOQ", "CKJUOQlcQ3Ar8wH9lIBKtePt5WxzWspePL3c/QsD", "oucher0e-21",809 # region="US", Timeout=5, MaxQPS=1)810 # amazon_dict["us"] = AmazonAPI("AKIAIGEGIKE7YTTY4GRQ", "ccF+1I42wYE5bs2TUmi+ghJXcSTqoS3Nn6lgbjr6", "oucher0e-21",811 # region="US", Timeout=5, MaxQPS=1)812 amazon_dict["us"] = AmazonAPI("AKIAJEXGSZ6O2Y6GCVGA", "D801IPdV5loOr29fW46/nVELHtsQZaWjh1M1Z1l3", "oucher309-21",813 region="US", Timeout=5, MaxQPS=1)#43800052@qq.com814 #815 # amazon_dict["jp"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu",816 # "oucher308-22", region="JP", Timeout=5, MaxQPS=1)817 # amazon_dict["us"] = AmazonAPI("AKIAJEHY7PZI24XSZ7GA", "al5M1gd5UVbDyjihIzF6kn/47ok7M4S29546vqRu",818 # "oucher308-22", region="US", Timeout=5, MaxQPS=1)819 import socket820 socket.setdefaulttimeout(5)821 needdownparent = []822 def _GetProducts(lan):823 ct = 0824 ct1 = 0825 alldata = []826 alldata.append(["asin", "title", "featurestr", "desc", "publisher", "detail_page_url"])827 amazon = amazon_dict[lan]828 product = _GetProduct(lan)829 flag = True830 while flag:831 items = []832 for i in range(10):833 try:834 asin = product.next()835 except:836 flag = False837 break838 if asin:839 items.append(asin)840 ct += 1841 if not items:842 break843 itemstr = ",".join(items)844 _CTime.PrintSpace(",".join(("åæ<%s>"%lan, "%s/%s"%(ct, ct1), itemstr,str(len(needdownparent)))))845 data = []846 import time847 time.sleep(1)848 try:849 amzproducts = amazon.lookup(ItemId=itemstr, ResponseGroup="BrowseNodes", Timeout=5)850 except Exception as e:851 print e852 continue853 if type(amzproducts) == list:854 pass855 else:856 amzproducts = [amzproducts]857 for amzproduct in amzproducts:858 ct1 += 1859 asin = amzproduct.asin860 browse_nodes = amzproduct.browse_nodes861 try:862 nodeid = browse_nodes[0].id863 except:864 
needdownparent.append(asin)865 continue866 # print node.id,222222222222867 # sdf868 data = [asin,nodeid ]869 alldata.append(data)870 _CTime.PrintSpace("æ§è¡ç»æ")871 misc.save_data_csv(alldata, "amazon_%s.csv" % lan)872 return ct, ct1873 alldata = []874 allct = 0875 allct1 = 1876 for lan in amazon_dict:877 ct, ct1 = _GetProducts(lan)878 allct += ct879 allct1 += ct1880 alldata.append(("<%s>ä¸è½½äº%s/%s" % (lan, ct1, ct)))881 _CTime.PrintSpace("ä¸è½½å®æ¯")882 print "å
±å®æ:%s/%s" % (allct1, allct)883 print "\n".join(alldata)884nobrandlist =None885lostasinlist =None886def DownBrandInfo():887 global lostasinlist,nobrandlist888 retrylist = []889 nobrandlist = []890 lostasinlist = []891 # def _GetProduct(lan):892 # dn = {893 # "us": 2,894 # }895 # market_id = dn[lan]896 # products = SysMarketProductMap.objects.filter(market_id=market_id).values("item_id").all()897 # for product in products:898 # yield product["item_id"]899 def _GetProduct(lan):900 for asin in amazonasintrack.objects.values_list("asin",flat=True):901 yield asin902 # for asin in retrylist:903 # yield asin904 amazon_dict = {}905 amazon_dict["us"] = AmazonAPI("AKIAJEXGSZ6O2Y6GCVGA", "D801IPdV5loOr29fW46/nVELHtsQZaWjh1M1Z1l3", "oucher309-21",906 region="US", Timeout=5, MaxQPS=1)#43800052@qq.com907 import socket908 socket.setdefaulttimeout(5)909 needdownparent = []910 def _GetProducts(lan):911 global lostasinlist,nobrandlist912 ct = 0913 ct1 = 0914 alldata = []915 alldata.append(["asin", "title", "featurestr", "desc", "publisher", "detail_page_url"])916 amazon = amazon_dict[lan]917 product = _GetProduct(lan)918 flag = True919 while flag:920 items = []921 for i in range(10):922 try:923 asin = product.next()924 except:925 flag = False926 break927 if asin:928 items.append(asin)929 ct += 1930 #print items,222222222931 if not items:932 break933 # if ct>4000:934 # break935 lostasinlist += items[:]936 itemstr = ",".join(items)937 _CTime.PrintSpace(",".join(("åæ<%s>"%lan, "%s/%s"%(ct, ct1), itemstr,str(len(retrylist)))))938 print len(lostasinlist),len(nobrandlist)939 data = []940 # import time941 # time.sleep(1)942 try:943 amzproducts = amazon.lookup(ItemId=itemstr, ResponseGroup="ItemAttributes", Timeout=5)944 except Exception as e:945 retrylist.extend(items)946 print type(e),dir(e),e.args,e.message,len(retrylist),111111111111111947 continue948 if type(amzproducts) == list:949 pass950 else:951 amzproducts = [amzproducts]952 for amzproduct in amzproducts:953 asin = amzproduct.asin954 try:955 lostasinlist.remove(asin)956 except:957 pass958 brandname = amzproduct.brand959 if not brandname:960 nobrandlist.append(asin)961 continue962 ct1 += 1963 #print asin,brandname,111111111964 # browse_nodes = amzproduct.browse_nodes965 # try:966 # nodeid = browse_nodes[0].id967 # except:968 # needdownparent.append(asin)969 # continue970 data = [asin,brandname]971 info = amazonbrand.objects.filter(asin=asin).first()972 if not info:973 info = amazonbrand()974 info.asin = asin975 info.brand = brandname976 info.save()977 # with open("amazon_brand.csv","a") as f:978 # f.write("%s,%s\n"%(asin,brandname))979 #alldata.append(data)980 _CTime.PrintSpace("æ§è¡ç»æ")981 #misc.save_data_csv(alldata, "amazon_%s.csv" % lan)982 return ct, ct1983 alldata = []984 allct = 0985 allct1 = 1986 for lan in amazon_dict:987 ct, ct1 = _GetProducts(lan)988 allct += ct989 allct1 += ct1990 alldata.append(("<%s>ä¸è½½äº%s/%s" % (lan, ct1, ct)))991 nobrandlist = list(set(nobrandlist))992 lostasinlist = list(set(lostasinlist))993 with open("nobrandlist.csv","w") as f:994 f.write("\n".join(nobrandlist))995 with open("lostasinlist.csv","w") as f:996 f.write("\n".join(lostasinlist))997 _CTime.PrintSpace("ä¸è½½å®æ¯")998 print "å
±å®æ:%s/%s" % (allct1, allct)999 print "\n".join(alldata)1000def main():1001 #1002 lans = ["it","de","jp","es","fr"]1003 #lans = ["de", "jp", "es", "fr"]1004 #lans = ["es",]1005 for lan in lans:1006 filename = "upload/amazon/desc/amazon_%s.xls"%lan1007 alldata = ImportXls(filename)1008 #REPORTINFO.ASIN_DOWN.data[lan]=len(alldata)1009 newdata_dict = {}1010 for data in alldata:1011 newdata = {}1012 asin,title,featurestr,desc = data[:4]1013 newdata["asin"] = asin1014 newdata["title"] = title1015 newdata["feature"] = featurestr1016 newdata["description"] = desc1017 spu_id = Asin2Spu(asin)1018 if spu_id in (0,None):1019 continue1020 newdata["spu_id"] = spu_id1021 olddata = newdata_dict.get(spu_id,None)1022 if olddata:1023 if len(olddata["description"]) > len(newdata["description"]):1024 continue1025 newdata_dict[spu_id] = newdata1026 # alldata = []1027 # asin_list = CSysAsin.GetAsinList(lan)1028 # for newdata in newdata_dict.values():1029 # if newdata["asin"] not in asin_list:1030 # continue1031 # alldata.append(newdata)1032 alldata = newdata_dict.values()1033 REPORTINFO.ALL_SPU_DOWN.data[lan] = len(alldata)1034 REPORTINFO.ALL_SPU_REDUCE_NO_DESC.data[lan] = 01035 REPORTINFO.DESCLEN_LT30.data[lan] = 01036 alldata1 =[]1037 for newdata in alldata:1038 ct =len(newdata["description"])1039 if ct<30:1040 REPORTINFO.DESCLEN_LT30.data[lan] +=11041 if ct==0:1042 #REPORTINFO.ALL_SPU_REDUCE_NO_DESC.data[lan] += 11043 continue1044 alldata1.append(newdata)1045 REPORTINFO.ALL_SPU_REDUCE_NO_DESC.data[lan] = len(alldata1)1046 CSpuDesc.Bluk_Add(alldata1,lan,SOURCE.AMAZON_INFO)1047 print "\n".join(REPORTINFO.DoReport())1048def main2():1049 lan = "jp"1050 # asin_list = CSysAsin.GetAsinList(lan)1051 #1052 # market_id = GetMarketID(lan)1053 # oQuery = SysMarketProductMap.objects.filter(market=market_id)1054 # REPORTINFO.AMA_PRODUCTS_PUSHED.data["asin"] = oQuery.count()1055 # REPORTINFO.AMA_PRODUCTS_PUSHED.data["sku"] = oQuery.values("sku_id").distinct().count()1056 #1057 # sku_list = oQuery.values("sku_id")#.values_list("sku_id",flat=True)1058 # REPORTINFO.AMA_PRODUCTS_PUSHED.data["spu"] =SkuProduct.objects.filter(id__in=sku_list).values("spu_id").distinct().count()1059 #1060 # filename = "amazon_%s.xls" % lan1061 # #alldata = ImportXls(filename)1062 #1063 #1064 #1065 # #newdata_dict = {}1066 # alldata =[]1067 # skudict = {}1068 # spudict = {}1069 #1070 # for data in ImportXls(filename):1071 # asin = data[0]1072 #1073 # spu_id,sku_id = Asin2SpuSku(asin)1074 # if spu_id in (0, None):1075 # continue1076 # skudict[sku_id] = 11077 # spudict[spu_id] = 11078 #1079 # alldata.append(data)1080 #1081 # REPORTINFO.AMA_PRODUCTS_DOWNED.data["asin"] = len(alldata)1082 # REPORTINFO.AMA_PRODUCTS_DOWNED.data["sku"] = len(skudict)1083 # REPORTINFO.AMA_PRODUCTS_DOWNED.data["spu"] = len(spudict)1084 #1085 #1086 # #alldata = newdata_dict.values()1087 #1088 # skudict = {}1089 # spudict = {}1090 # alldata1 = []1091 # for newdata in alldata:1092 # asin, title, featurestr, desc = newdata[:4]1093 # ct = len(desc)1094 # if ct < 30:1095 # # REPORTINFO.DESCLEN_LT30.data[lan] += 11096 # # if ct == 0:1097 # continue1098 # spu_id,sku_id = Asin2SpuSku(asin)1099 #1100 # skudict[sku_id] = 11101 # spudict[spu_id] = 11102 #1103 # alldata1.append(newdata)1104 #1105 # REPORTINFO.AMA_PRODUCTS_REDUCE.data["asin"] = len(alldata1)1106 # REPORTINFO.AMA_PRODUCTS_REDUCE.data["sku"] = len(skudict)1107 # REPORTINFO.AMA_PRODUCTS_REDUCE.data["spu"] = len(spudict)1108 #1109 # newdata_dict = {}1110 # for data in alldata1:1111 # asin, title, featurestr, desc = 
data[:4]1112 # newdata = {}1113 # newdata["asin"] = asin1114 # spu_id, sku_id = Asin2SpuSku(asin)1115 # newdata["title"] = title1116 # newdata["feature"] = featurestr1117 # newdata["description"] = desc1118 # newdata["spu_id"] = spu_id1119 # olddata = newdata_dict.get(spu_id, None)1120 # if olddata:1121 # if len(olddata["description"]) > len(newdata["description"]):1122 # continue1123 #1124 # newdata_dict[spu_id]= newdata1125 #1126 # # CSpuDesc.Bluk_Add(newdata_dict.values(), lan, SOURCE.AMAZON_INFO)1127 # REPORTINFO.AMA_PRODUCTS_SAVED.num = len(newdata_dict)1128 #------------------------------------------------------------------1129 #1130 # oQuery = SkuProduct.objects.filter()1131 # REPORTINFO.ALL_PRODUCTS.num = oQuery.values("spu_id").distinct().count()1132 #1133 # SaledProducts = SkuStatistic.objects.filter(platform="Amazon").values("spu").distinct()1134 # #SaledProducts = SkuDaysStatistic.objects.values("sku").filter(total_qty__gt=0).values("sku")1135 # oQuery = oQuery.filter(id__in=SaledProducts)1136 # REPORTINFO.SALED_PRODUCTS.num = oQuery.values("spu_id").distinct().count()1137 #1138 # done_spu_list = CSpuDesc.GetDoned_Spu_List(lan)1139 # oQuery = oQuery.exclude(spu__in=done_spu_list)1140 # REPORTINFO.SALED_PRODUCTS_NOTUSED.num = oQuery.values("spu_id").distinct().count()1141 #1142 # spu_list = oQuery.values_list("spu_id",flat=True).distinct()1143 # allct = len(spu_list)1144 # alldata = []1145 # for spu_id in spu_list:1146 # #print spu_id,2222221147 # product = SpuProduct.objects.filter(id=spu_id).values("title").first()1148 # title = product["title"]1149 #1150 # ct += 11151 # if ct % 1000 == 0:1152 # _CTime.PrintSpace("%s/%s" % (ct, allct))1153 #1154 # extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1155 # if not extinfo:1156 # continue1157 # description = extinfo["description"]1158 #1159 # if len(description)<30:1160 # continue1161 #1162 # feature = extinfo["feature"]1163 # data = [spu_id,title,feature,description]1164 # alldata.append(data)1165 #1166 # REPORTINFO.SALED_PRODUCTS_DESCGT30.m_Num = len(alldata)1167 # REPORTINFO.SALED_PRODUCTS_TRAN_TIME.m_Num = len(alldata)1168 # REPORTINFO.SALED_PRODUCTS_TRAN_COUNT.m_Num = 01169 # newalldata = []1170 # transcache = {}1171 # for data in alldata:1172 # spu_id, title, feature, description = data1173 #1174 # if title not in transcache:1175 # transcache[title] = 11176 # REPORTINFO.SALED_PRODUCTS_TRAN_COUNT.m_Num += len(title)1177 # # else:1178 # # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11179 # # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(title)1180 #1181 # if feature not in transcache:1182 # transcache[feature] = 11183 # REPORTINFO.SALED_PRODUCTS_TRAN_COUNT.m_Num += len(feature)1184 # # else:1185 # # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(feature)1186 # # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11187 #1188 # if description not in transcache:1189 # transcache[description] = 11190 # REPORTINFO.SALED_PRODUCTS_TRAN_COUNT.m_Num += len(description)1191 # # else:1192 # # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11193 # # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(description)1194 #1195 #1196 # # ------------------------------------------------------------------1197 # # CSpuDesc.Bluk_Add(alldata, lan, SOURCE.AMAZON_INFO)1198 # REPORTINFO.AMA_PRODUCTS_SAVED.num = len(newdata_dict)1199 #1200 # oQuery = SkuProduct.objects.filter()1201 # REPORTINFO.ALL_PRODUCTS.num = oQuery.values("spu_id").distinct().count()1202 #1203 # # HASSTOCKProducts = 
SkuDaysStatistic.objects.values("sku").filter(total_qty__gt=0).values("sku")1204 # # oQuery = oQuery.filter(id__in=HASSTOCKProducts)1205 # hasstore_products = SkuWarehouseInventory.objects.values("sku_id").annotate(1206 # allct=Sum('on_hand_qty')).filter(1207 # allct__gt=1).values("sku_id")1208 # query_select = Q(purchase_ok=1) | Q(id__in=hasstore_products)1209 # allproducts = SkuProduct.objects.filter(query_select).values("id")1210 # oQuery = oQuery.filter(id__in=allproducts)1211 #1212 # REPORTINFO.HASSTOCK_PRODUCTS.num = oQuery.values("spu_id").distinct().count()1213 #1214 # done_spu_list = CSpuDesc.GetDoned_Spu_List(lan)1215 # oQuery = oQuery.exclude(spu__in=done_spu_list)1216 # REPORTINFO.HASSTOCK_PRODUCTS_NOTUSED.num = oQuery.values("spu_id").distinct().count()1217 #1218 # spu_list = oQuery.values_list("spu_id", flat=True).distinct()1219 # allct = len(spu_list)1220 # alldata = []1221 # ct = 01222 # for spu_id in spu_list:1223 # # print spu_id,2222221224 # product = SpuProduct.objects.filter(id=spu_id).values("title").first()1225 # title = product["title"]1226 #1227 # ct += 11228 # if ct % 1000 == 0:1229 # _CTime.PrintSpace("%s/%s" % (ct, allct))1230 #1231 # extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1232 # if not extinfo:1233 # continue1234 # description = extinfo["description"]1235 #1236 # if len(description) < 30:1237 # continue1238 #1239 # feature = extinfo["feature"]1240 # data = [spu_id, title, feature, description]1241 # alldata.append(data)1242 #1243 # REPORTINFO.HASSTOCK_PRODUCTS_DESCGT30.m_Num = len(alldata)1244 # REPORTINFO.HASSTOCK_PRODUCTS_TRAN_TIME.m_Num = len(alldata)1245 # REPORTINFO.HASSTOCK_PRODUCTS_TRAN_COUNT.m_Num = 01246 # newalldata = []1247 # transcache = {}1248 # for data in alldata:1249 # spu_id, title, feature, description = data1250 #1251 # if title not in transcache:1252 # transcache[title] = 11253 # REPORTINFO.HASSTOCK_PRODUCTS_TRAN_COUNT.m_Num += len(title)1254 # # else:1255 # # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11256 # # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(title)1257 #1258 # if feature not in transcache:1259 # transcache[feature] = 11260 # REPORTINFO.HASSTOCK_PRODUCTS_TRAN_COUNT.m_Num += len(feature)1261 # # else:1262 # # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(feature)1263 # # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11264 #1265 # if description not in transcache:1266 # transcache[description] = 11267 # REPORTINFO.HASSTOCK_PRODUCTS_TRAN_COUNT.m_Num += len(description)1268 # # else:1269 # # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11270 # # REPORTINFO.HASTRANSD_TIME.data["å符"]1271 #1272 # ------------------------------------------------------------------1273 oQuery = SkuProduct.objects.filter()1274 REPORTINFO.ALL_PRODUCTS.num = oQuery.values("spu_id").distinct().count()1275 # PUSHABLEProducts = SkuDaysStatistic.objects.values("sku").filter(total_qty__gt=0).values("sku")1276 # oQuery = oQuery.filter(id__in=PUSHABLEProducts)1277 hasstore_products = SkuWarehouseInventory.objects.values("sku_id").annotate(allct=Sum('on_hand_qty')).filter(1278 allct__gt=1).values("sku_id")1279 query_select = Q(purchase_ok=1) | Q(id__in=hasstore_products)1280 #query_select = Q(id__in=hasstore_products)1281 allproducts = SkuProduct.objects.filter(query_select).values("id")1282 oQuery = oQuery.filter(id__in=allproducts)1283 REPORTINFO.PUSHABLE_PRODUCTS.num = oQuery.values("spu_id").distinct().count()1284 SaledProducts = SkuStatistic.objects.values("sku").distinct()1285 oQuery = oQuery.filter(id__in=SaledProducts)1286 
REPORTINFO.PUSHABLE_PRODUCTS_NOSALE.num = oQuery.values("spu_id").distinct().count()1287 # done_spu_list = CSpuDesc.GetDoned_Spu_List(lan)1288 # oQuery = oQuery.exclude(spu__in=done_spu_list)1289 # REPORTINFO.PUSHABLE_PRODUCTS_NOTUSED.num = oQuery.values("spu_id").distinct().count()1290 spu_list = oQuery.values_list("spu_id", flat=True).distinct()1291 allct = len(spu_list)1292 alldata = []1293 ct =01294 for spu_id in spu_list:1295 # print spu_id,2222221296 product = SpuProduct.objects.filter(id=spu_id).values("title").first()1297 ct += 11298 if ct % 1000 == 0:1299 _CTime.PrintSpace("%s/%s" % (ct, allct))1300 if not product:1301 continue1302 title = product["title"]1303 extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1304 if not extinfo:1305 continue1306 description = extinfo["description"]1307 if len(description) < 30:1308 continue1309 feature = extinfo["feature"]1310 data = [spu_id, title, feature, description]1311 alldata.append(data)1312 REPORTINFO.PUSHABLE_PRODUCTS_DESCGT30.m_Num = len(alldata)1313 REPORTINFO.PUSHABLE_PRODUCTS_TRAN_TIME.m_Num = len(alldata)1314 REPORTINFO.PUSHABLE_PRODUCTS_TRAN_COUNT.m_Num = 01315 newalldata = []1316 transcache = {}1317 for data in alldata:1318 spu_id, title, feature, description = data1319 if title not in transcache:1320 transcache[title] = 11321 REPORTINFO.PUSHABLE_PRODUCTS_TRAN_COUNT.m_Num += len(title)1322 # else:1323 # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11324 # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(title)1325 if feature not in transcache:1326 transcache[feature] = 11327 REPORTINFO.PUSHABLE_PRODUCTS_TRAN_COUNT.m_Num += len(feature)1328 # else:1329 # REPORTINFO.HASTRANSD_TIME.data["å符"] += len(feature)1330 # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11331 if description not in transcache:1332 transcache[description] = 11333 REPORTINFO.PUSHABLE_PRODUCTS_TRAN_COUNT.m_Num += len(description)1334 # else:1335 # REPORTINFO.HASTRANSD_TIME.data["次æ°"] += 11336 # REPORTINFO.HASTRANSD_TIME.data["å符"]1337 #REPORTINFO.SALED_PRODUCTS_DESCGT30m_Num = oQuery.values("spu_id").distinct().count()1338 #REPORTINFO.NewReport()1339 print "\n".join(REPORTINFO.NewReport())1340 # # alldata = filter(clearlt30desc,alldata)1341 #1342 # SALED_PRODUCTS = "ç³»ç»æåºåæééç产åspuæ°"1343 # SALED_PRODUCTS_TRAN_TIME = "éè¦ç¿»è¯çspuæ°"1344 # SALED_PRODUCTS_TRAN_COUNT = "éè¦ç¿»è¯çå符æ°"1345 #1346 # PUSHABLE_PRODUCTS = "ç³»ç»å¯ä¸æ¶ç产åspuæ°"1347 # PUSHABLE_PRODUCTS_TRAN_TIME = "éè¦ç¿»è¯çspuæ°"1348 # PUSHABLE_PRODUCTS_TRAN_TIME = "éè¦ç¿»è¯çå符æ°"1349def GetTransReport():1350 lan = "jp"1351 alldata,result = misc.get_data_xls("upload/amazon/sku_fortrans_jp.xls")1352 skulist = []1353 for data in alldata:1354 skulist.append(str(data[1].strip()))1355 #skulist = skulist_str.split(",")1356 print "æ»å
±éè¦å¤çsku-->",len(skulist)1357 # ------------------------------------------------------------------1358 oQuery = SkuProduct.objects.filter(sku__in=skulist)1359 print "ææsku-->",len(oQuery.values_list("sku",flat=True))1360 oQuery = SkuProduct.objects.filter(sku__in=skulist)1361 print "ææspu-->",oQuery.values_list("spu",flat=True).distinct().count()1362 allct = oQuery.values_list("spu",flat=True).distinct().count()1363 alldata = []1364 ct =01365 for spu_id in oQuery.values_list("spu",flat=True).distinct():1366 product = SpuProduct.objects.filter(id=spu_id).values("title").first()1367 ct += 11368 if ct % 100 == 0:1369 _CTime.PrintSpace("%s/%s" % (ct, allct))1370 if not product:1371 continue1372 title = product["title"]1373 extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1374 if extinfo:1375 feature = extinfo["feature"]1376 description = extinfo["description"]1377 else:1378 feature = ""1379 description = ""1380 # if len(description) < 30:1381 # continue1382 data = [spu_id, title, feature, description]1383 alldata.append(data)1384 transcache = {}1385 for data in alldata:1386 spu_id, title, feature, description = data1387 if title not in transcache:1388 transcache[title] = len(title)1389 if feature not in transcache:1390 transcache[feature] = len(feature)1391 if description not in transcache:1392 transcache[description] = len(description)1393 print "éè¦ç¿»è¯å符é¿åº¦--->",sum(transcache.values())1394 # oldinfo_list = ReadCache("de")1395 # _CTime.PrintSpace("读å以åä¸æ¶ç产åä¿¡æ¯")1396 # transcache = {}1397 # for data in alldata:1398 # spu_id, title, feature, description = data1399 #1400 # if title not in transcache and title not in oldinfo_list:1401 # transcache[title] = len(title)1402 #1403 # if feature not in transcache and feature not in oldinfo_list:1404 # transcache[feature] = len(feature)1405 #1406 # if description not in transcache and description not in oldinfo_list:1407 # transcache[description] = len(description)1408 #1409 #1410 # print "使ç¨åæ¥çä¸æ¶äº§ååéè¦ç¿»è¯å符é¿åº¦--->",sum(transcache.values())1411def OutTrans(lan):1412 alldata,result = misc.get_data_xls("upload/amazon/sku_fortrans_%s.xls"%lan)1413 skulist = []1414 for data in alldata:1415 skulist.append(str(data[1].strip()))1416 print "æ»å
±éè¦å¤çsku-->",len(skulist)1417 # ------------------------------------------------------------------1418 oQuery = SkuProduct.objects.filter(sku__in=skulist)1419 print "ææsku-->",len(oQuery.values_list("sku",flat=True))1420 oQuery = SkuProduct.objects.filter(sku__in=skulist)1421 print "ææspu-->",oQuery.values_list("spu",flat=True).distinct().count()1422 allct = oQuery.values_list("spu",flat=True).distinct().count()1423 alldata = [1424 [ "spu","æ§æ é¢","æ§æè¦","æ§æè¿°","æ°æ é¢","æ°æè¦","æ°æè¿°"]1425 ]1426 ct =01427 for spu_id in oQuery.values_list("spu",flat=True).distinct():1428 product = SpuProduct.objects.filter(id=spu_id).values("title","spu").first()1429 spucode = product["spu"]1430 ct += 11431 if ct % 100 == 0:1432 _CTime.PrintSpace("%s/%s" % (ct, allct))1433 if not product:1434 continue1435 # if CSpuDesc.objects.filter(spu_id=spu_id).filter(language=lan).exists():1436 # continue1437 title = product["title"]1438 extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1439 if extinfo:1440 feature = extinfo["feature"]1441 description = extinfo["description"]1442 else:1443 feature = ""1444 description = ""1445 # if len(description) < 30:1446 # continue1447 newinfo = CSpuDesc.objects.filter(spu_id=spu_id).filter(language=lan).first()1448 if newinfo:1449 data = [spucode, title, feature, description,newinfo.title,newinfo.feature,newinfo.description]1450 else:1451 data = [spucode, title, feature, description, "","",""]1452 alldata.append(data)1453 misc.save_data_xls(alldata,"upload/amazon/out%s.xls"%lan)1454def DoTrans(lan):1455 alldata,result = misc.get_data_xls("upload/amazon/sku_fortrans_%s.xls"%lan)1456 skulist = []1457 for data in alldata:1458 skulist.append(str(data[1].strip()))1459 print "æ»å
±éè¦å¤çsku-->",len(skulist)1460 # ------------------------------------------------------------------1461 oQuery = SkuProduct.objects.filter(sku__in=skulist)1462 print "ææsku-->",len(oQuery.values_list("sku",flat=True))1463 oQuery = SkuProduct.objects.filter(sku__in=skulist)1464 print "ææspu-->",oQuery.values_list("spu",flat=True).distinct().count()1465 allct = oQuery.values_list("spu",flat=True).distinct().count()1466 alldata = []1467 ct =01468 for spu_id in oQuery.values_list("spu",flat=True).distinct():1469 product = SpuProduct.objects.filter(id=spu_id).values("title").first()1470 ct += 11471 if ct % 100 == 0:1472 _CTime.PrintSpace("%s/%s" % (ct, allct))1473 if not product:1474 continue1475 if CSpuDesc.objects.filter(spu_id=spu_id).filter(language=lan).exists():1476 continue1477 title = product["title"]1478 extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1479 if extinfo:1480 feature = extinfo["feature"]1481 description = extinfo["description"]1482 else:1483 feature = ""1484 description = ""1485 # if len(description) < 30:1486 # continue1487 data = [spu_id, title, feature, description]1488 alldata.append(data)1489 #print data1490 transcache = {}1491 #savedata = []1492 #sdfsadf1493 ct =01494 allct = len(alldata)1495 #print alldata,231231231496 for data in alldata:1497 spu_id, title, feature, description = data1498 #print title1499 #continue1500 try:1501 features = eval(feature)1502 features_lens = len(features)1503 except:1504 features = [feature,]1505 features_lens = 11506 olddata = {1507 "title":title,1508 #"feature": feature,1509 "description": description,1510 }1511 for i in range(features_lens):1512 olddata["feature%s"%i] = features[i]1513 newdata = {}1514 #continue1515 #print olddata1516 for key in olddata:1517 value = olddata[key]1518 if value not in transcache:1519 if value:1520 dn = misc.Trans(lan,texts=[value])1521 try:1522 newvalue = dn.values()[0]1523 except Exception as e:1524 misc.ColorPrint("ç¿»è¯å¤±è´¥",e,key,spu_id,value)1525 newvalue = ""1526 newvalue = newvalue.replace("</ ","</")1527 newvalue = newvalue.replace(" />", "/>")1528 if ct % 50 == 0:1529 print value ,"-->",newvalue1530 else:1531 newvalue = ""1532 transcache[value] =newvalue1533 newdata[key] = transcache[value]1534 newfeatures = []1535 for i in range(features_lens):1536 newvalue = newdata["feature%s"%i]1537 newfeatures.append(newvalue)1538 #newdata["feature"] = "'%s'"%newdata["feature"].decode("utf-8").encode("utf-8").replace("||","','")1539 newdata["feature"] = "||".join(newfeatures)1540 # if title in transcache:1541 # newdata[title] = transcache[title]1542 # title = ""1543 #1544 #1545 # if feature in transcache:1546 # newdata[feature] = transcache[feature]1547 # feature = ""1548 #1549 # if description in transcache:1550 # newdata[description] = transcache[description]1551 # description = ""1552 #1553 # texts = [title,description,feature]1554 ct += 11555 if ct % 50 == 0:1556 _CTime.PrintSpace("%s/%s" % (ct, allct))1557 #print texts,ct,22221558 # if texts:1559 # dn = misc.EN2DE(texts=texts)1560 #1561 # # try:1562 # if title:1563 # transcache[title] = newdata["title"] = dn[title]1564 #1565 # if description:1566 # transcache[description] = newdata["description"] = dn[description]1567 # if feature:1568 # transcache[feature] = newdata["feature"] = dn[feature]1569 # # except Exception as e:1570 # print e,91111571 # continue1572 newdata["spu_id"] = spu_id1573 #savedata.append(newdata)1574 CSpuDesc.Bluk_Add([newdata], lan, SOURCE.AMAZON_US_INFO_TRANS)1575 #CSpuDesc.Bluk_Add(savedata, lan, 
SOURCE.AMAZON_US_INFO_TRANS)1576# g_OnTrans = False1577# def DoTransViews(lan,celery_task=None):1578# import time1579# class _task(object):1580# def __init__(self,task,spuct,skuct):1581# self.m_Task = task1582# self.m_SpuCt = spuct1583# self.m_SkuCt = skuct1584# self.m_Simple = ""1585#1586# def update_state(self,curct,failct,allcharct=0):1587# if self.m_Task:1588# self.m_Task.update_state(state="PROGRESS",meta={1589# "curct": curct,1590# "failct": failct,1591# "spuct": self.m_SpuCt,1592# "skuct": self.m_SkuCt,1593# "simple": self.m_Simple,1594# "allcharct":allcharct1595# })1596#1597# curtask = _task(celery_task, 1000, 4000)1598# for i in range(10000):1599# if i %100==0:1600# curtask.m_Simple = "%så·²ç»åå¤å¥½äº"1601# curtask.update_state(i,i/10,i*10)1602# yield i1603# time.sleep(1)1604g_OnTrans = False1605def DoTransViews(lan,celery_task=None):1606 class _task(object):1607 def __init__(self,task,spuct,transct):1608 self.m_Task = task1609 self.m_SpuCt = spuct1610 self.m_transct = transct1611 self.m_Simple = ""1612 def update_state(self,curct,failct,allcharct=0):1613 if self.m_Task:1614 self.m_Task.update_state(state="PROGRESS",meta={1615 "curct": curct,1616 "failct": failct,1617 "spuct": self.m_SpuCt,1618 "transct": self.m_transct,1619 "simple": self.m_Simple,1620 "allcharct":allcharct1621 })1622 #if celery_task==None:1623 #celery_task.update_state()1624 alldata,result = misc.get_data_xls("upload/amazon/sku_fortrans_%s.xls"%lan)1625 skulist = []1626 for data in alldata:1627 skulist.append(str(data[1].strip()))1628 print "æ»å
±éè¦å¤çsku-->%s"%len(skulist)1629 # ------------------------------------------------------------------1630 oQuery = SkuProduct.objects.filter(sku__in=skulist)1631 skuct = len(oQuery.values_list("sku",flat=True))1632 print "ææsku-->%s"%skuct1633 oQuery = SkuProduct.objects.filter(sku__in=skulist)1634 spuct = oQuery.values_list("spu",flat=True).distinct().count()1635 print "ææspu-->%s"% spuct1636 allct = oQuery.values_list("spu",flat=True).distinct().count()1637 alldata = []1638 ct =01639 for spu_id in oQuery.values_list("spu",flat=True).distinct():1640 product = SpuProduct.objects.filter(id=spu_id).values("title").first()1641 ct += 11642 if ct % 100 == 0:1643 print "%s/%s" % (ct, allct)1644 if not product:1645 continue1646 if CSpuDesc.objects.filter(spu_id=spu_id).filter(language=lan).exists():1647 continue1648 title = product["title"]1649 extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1650 if extinfo:1651 feature = extinfo["feature"]1652 description = extinfo["description"]1653 else:1654 feature = ""1655 description = ""1656 data = [spu_id, title, feature, description]1657 alldata.append(data)1658 transcache = {}1659 print "å®é
éè¦ç¿»è¯spuæ°%s"%len(alldata)1660 curtask = _task(celery_task, spuct, len(alldata))1661 ct =01662 failct = 01663 allct = len(alldata)1664 allcharct = 01665 for data in alldata:1666 spu_id, title, feature, description = data1667 try:1668 features = eval(feature)1669 features_lens = len(features)1670 except:1671 features = [feature,]1672 features_lens = 11673 olddata = {1674 "title":title,1675 #"feature": feature,1676 "description": description,1677 }1678 for i in range(features_lens):1679 olddata["feature%s"%i] = features[i]1680 newdata = {}1681 #continue1682 #print olddata1683 for key in olddata:1684 value = olddata[key]1685 if value not in transcache:1686 if value:1687 dn = misc.Trans(lan,texts=[value])1688 allcharct += len(value)1689 try:1690 newvalue = dn.values()[0]1691 except Exception as e:1692 failct += 11693 misc.ColorPrint("ç¿»è¯å¤±è´¥",e,key,spu_id,value)1694 newvalue = ""1695 newvalue = newvalue.replace("</ ","</")1696 newvalue = newvalue.replace(" />", "/>")1697 if ct % 50 == 0:1698 print "%s-->%s"%(value,newvalue)1699 else:1700 newvalue = ""1701 transcache[value] =newvalue1702 newdata[key] = transcache[value]1703 newfeatures = []1704 for i in range(features_lens):1705 newvalue = newdata["feature%s"%i]1706 newfeatures.append(newvalue)1707 newdata["feature"] = "||".join(newfeatures)1708 ct += 11709 if ct % 50 == 1:1710 curtask.m_Simple = newdata["title"]1711 print "%s/%s" % (ct, allct)1712 curtask.update_state(ct,failct,allcharct)1713 newdata["spu_id"] = spu_id1714 if newdata["title"]=="":#æ é¢ç¿»è¯å¤±è´¥ç®ç¿»è¯å¤±è´¥1715 continue1716 CSpuDesc.Bluk_Add([newdata], lan, SOURCE.AMAZON_US_INFO_TRANS)1717 print "æ»å
±ç¿»è¯å符%s"%allcharct1718 return {1719 "curct": ct,1720 "failct": failct,1721 "spuct": curtask.m_SpuCt,1722 "transct": curtask.m_transct,1723 "simple": curtask.m_Simple,1724 "allcharct":allcharct1725 }1726def ReadCache(lan):1727 dn = {}1728 products = CSpuDesc.objects.filter(language=lan).values()1729 for product in products:1730 spu_id = product["spu_id"]1731 description = product["description"]1732 feature = product["feature"]1733 title = product["title"]1734 product = SpuProduct.objects.filter(id=spu_id).values("title").first()1735 if not product:1736 continue1737 trans_title = product["title"]1738 dn[title] = trans_title1739 extinfo = SpuDescriptionEn.objects.filter(spu=spu_id).values().first()1740 if extinfo:1741 trans_feature = extinfo["feature"]1742 trans_description = extinfo["description"]1743 dn[title] = trans_title1744 dn[feature] = trans_feature1745 dn[description] = trans_description...
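The _Report_Info / REPORTINFO pair above is a counter-carrying enum: every member is a str subclass instance that also holds a display message, a running total and a per-language data dict. Below is a minimal, standalone sketch of that pattern (Python 3, no Django required); the DEMO_REPORT class and its member names are illustrative only, not taken from the file above.

import enum

class _ReportItem(str):
    # A str subclass whose instances carry a message plus mutable counters,
    # mirroring _Report_Info above.
    _next_code = 0

    def __new__(cls, msg):
        cls._next_code += 1
        code = cls._next_code
        self = str.__new__(cls, str(code))  # the string value is just a numeric code
        self._value_ = code                 # explicit value for the Enum machinery
        self.m_Msg = msg
        self.m_Num = 0
        self.m_Data = {}
        return self

    def GetValue(self, lan=""):
        if lan:
            return "%s<%s>:%s" % (self.m_Msg, lan, self.m_Data.get(lan, ""))
        return "%s:%s" % (self.m_Msg, self.m_Num)

class DEMO_REPORT(_ReportItem, enum.Enum):
    SPU_INSERT = "SPUs inserted"
    SPU_UPDATE = "SPUs updated"

# usage: members are looked up as enum attributes, then their counters mutated
DEMO_REPORT.SPU_INSERT.m_Data["jp"] = 3
DEMO_REPORT.SPU_UPDATE.m_Num = 7
print(DEMO_REPORT.SPU_INSERT.GetValue("jp"))  # -> SPUs inserted<jp>:3
print(DEMO_REPORT.SPU_UPDATE.GetValue())      # -> SPUs updated:7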
0008_auto_20220521_2138.py
Source:0008_auto_20220521_2138.py
# Generated by Django 3.2.8 on 2022-05-21 21:38
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('user', '0007_alter_dailyinfo_daily_time'),
    ]

    operations = [
        migrations.AddField(
            model_name='reportinfo',
            name='result',
            field=models.IntegerField(choices=[(0, 'No violation'), (1, 'Violation')], default=0, verbose_name='Handling result'),
        ),
        migrations.AlterField(
            model_name='reportinfo',
            name='adminstrate',
            field=models.ForeignKey(db_column='adminstrate', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Handler'),
        ),
        migrations.AlterField(
            model_name='reportinfo',
            name='imformer',
            field=models.ForeignKey(db_column='imformer', on_delete=django.db.models.deletion.CASCADE, related_name='imformer_id', to=settings.AUTH_USER_MODEL, verbose_name='Reporter'),
        ),
        migrations.AlterField(
            model_name='reportinfo',
            name='isdealt',
            field=models.BooleanField(default=False, verbose_name='Handled or not'),
        ),
        migrations.AlterField(
            model_name='reportinfo',
            name='range',
            field=models.CharField(max_length=20, verbose_name='Page range of the book violation'),
        ),
        migrations.AlterField(
            model_name='reportinfo',
            name='reported',
            field=models.ForeignKey(db_column='reported', on_delete=django.db.models.deletion.CASCADE, related_name='reported_id', to=settings.AUTH_USER_MODEL, verbose_name='Reported user'),
        ),
        migrations.AlterField(
            model_name='reportinfo',
            name='reportid',
            field=models.IntegerField(db_column='reportid', primary_key=True, serialize=False, verbose_name='Report ID'),
        ),
...
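For reference, the operations above imply a user.ReportInfo model roughly like the hypothetical sketch below. Fields the migration does not touch, the Meta options and the original label wording are not recoverable from the migration alone, and the verbose_name values are rendered in English here.

from django.conf import settings
from django.db import models

class ReportInfo(models.Model):
    # primary key and foreign keys exactly as described by the migration operations
    reportid = models.IntegerField(db_column='reportid', primary_key=True,
                                   verbose_name='Report ID')
    imformer = models.ForeignKey(settings.AUTH_USER_MODEL, db_column='imformer',
                                 on_delete=models.CASCADE,
                                 related_name='imformer_id', verbose_name='Reporter')
    reported = models.ForeignKey(settings.AUTH_USER_MODEL, db_column='reported',
                                 on_delete=models.CASCADE,
                                 related_name='reported_id', verbose_name='Reported user')
    adminstrate = models.ForeignKey(settings.AUTH_USER_MODEL, db_column='adminstrate',
                                    null=True, on_delete=models.CASCADE,
                                    verbose_name='Handler')
    range = models.CharField(max_length=20, verbose_name='Page range of the book violation')
    isdealt = models.BooleanField(default=False, verbose_name='Handled or not')
    result = models.IntegerField(choices=[(0, 'No violation'), (1, 'Violation')],
                                 default=0, verbose_name='Handling result')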
pytest.py
Source:pytest.py
import os, shutil
from subprocess import check_output


class PythonTesting:
    def __init__(self, pyfile):
        # work on a fixed local copy of the script under test
        self.pyfile = "test.py"
        shutil.copyfile(pyfile, self.pyfile)
        #print("Copy successfully")

    def Test(self, inputpath, outputpath):
        # Run the copied script under coverage, feeding it inputpath on stdin
        # and redirecting its stdout to outputpath.
        result = os.popen("coverage run " + self.pyfile + " < \"" + inputpath + "\" > \"" + outputpath + "\"")
        res = result.read()
        for line in res.splitlines():
            #print(line)
            pass
        #print("coverage run " + pyfilename + " < " + inputpath + " > " + outputpath)
        Report = check_output("coverage report -m " + self.pyfile, shell=True).decode()
        ReportList = Report.split("\n")
        # parse the data row of the "coverage report -m" output
        # (columns: Name, Stmts, Miss, Cover, Missing)
        ReportInfo = ReportList[2].split(" ")
        ReportInfo = [x.replace('\r', '') for x in ReportInfo if x != '']
        ReportInfo = [x.replace(',', '') for x in ReportInfo]
        Totlength = int(ReportInfo[1])
        MissingLine = []
        CoverLine = []
        if len(ReportInfo) > 4:
            # columns from index 4 onward list the missing lines, either as
            # single numbers or as "start-end" ranges
            for i in range(4, len(ReportInfo)):
                if '-' in ReportInfo[i]:
                    RInfoList = ReportInfo[i].split('-')
                    L = RInfoList[0]
                    R = RInfoList[1]
                    for lineno in range(int(L), int(R) + 1):
                        MissingLine.append(lineno)
                else:
                    MissingLine.append(int(ReportInfo[i]))
        # lines 1..Totlength that are not listed as missing are reported as covered
        for i in range(1, Totlength + 1):
            if i not in MissingLine:
                CoverLine.append(i)
        return CoverLine

    def Delete(self):
        os.remove(self.pyfile)
...
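A small usage sketch for the wrapper above, assuming coverage.py is installed and that the target script and input/output paths (hypothetical names) exist:

if __name__ == "__main__":
    tester = PythonTesting("solution.py")          # script is copied to ./test.py
    covered = tester.Test("tests/input_01.txt",    # fed to the program on stdin
                          "tests/output_01.txt")   # program stdout is written here
    print("covered lines:", covered)
    tester.Delete()                                # remove the temporary copy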