Best Python code snippet using slash
api_views.py
Source:api_views.py
"""API views for the movie service: lists, detail, genre browsing, likes,
bookmarks ("my list"), playback-position saving, and search.

Reconstructed from a corrupted dump: fused line numbers were stripped,
mojibake Korean docstrings/comments were translated to English, and Korean
*runtime* string literals (genre names, JSON response messages) were restored
to Hangul.

NOTE(review): the Hangul restorations below were decoded from damaged
mojibake — verify them against the original repository before deploying,
since these strings are matched against database contents and returned to
clients.
"""
import random  # fix: random.randint/random.shuffle were used without an explicit import

from django.db.models import Max, Q, F
from django.http import JsonResponse
from django.utils import timezone
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.models import SubUser
from .serializers import *


class MovieList(generics.ListAPIView):
    """Full movie list.

    Per movie:
    - id: primary key
    - name: title
    - horizontal_image_path: landscape image path
    - vertical_image: portrait image (subject to change)
    """
    queryset = Movie.objects.all()
    serializer_class = MovieSerializer


class HomePage(generics.ListAPIView):
    """Home-page screen: one randomly chosen movie, featured large at the top.

    Required headers:
    - Authorization: Token <token>
    - subuserid: profile (sub-user) id  (header name has no underscore)
    """
    serializer_class = HomePageSerializer

    def get_queryset(self):
        # Pick a single random movie. Ids may be sparse (deleted rows), so
        # retry random pks until one exists.
        max_id = Movie.objects.all().aggregate(max_id=Max('id'))['max_id']
        if max_id is None:
            # Fix: the original crashed (randint(1, None)) on an empty table.
            return Movie.objects.none()
        while True:
            pk = random.randint(1, max_id)
            queryset = Movie.objects.filter(pk=pk)
            if queryset:
                return queryset

    def get_serializer_context(self):
        # Expose the requesting profile id to the serializer.
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        context = super().get_serializer_context()
        context['sub_user_id'] = sub_user_id
        return context


class GenreSelectBefore(generics.ListAPIView):
    """Screen shown when the movies tab is pressed (before picking a genre).

    The single movie returned is the one featured large at the top.
    Required headers: Authorization (Token), subuserid (no underscore).
    Fields: id, name, horizontal_image_path, vertical_image.
    """
    serializer_class = GenreSelectBeforeSerializer

    def get_queryset(self):
        # Same sparse-id random pick as HomePage.
        max_id = Movie.objects.all().aggregate(max_id=Max('id'))['max_id']
        if max_id is None:
            # Fix: avoid randint(1, None) on an empty table.
            return Movie.objects.none()
        while True:
            pk = random.randint(1, max_id)
            queryset = Movie.objects.filter(pk=pk)
            if queryset:
                return queryset

    def get_serializer_context(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        # Runtime data consumed by the serializer — must stay in Korean.
        genre_list = ['한국 영화', '외국 영화', '어린이', '가족', '액션', '스릴러', 'SF',
                      '판타지', '범죄', '호러', '다큐멘터리', '로맨스', '코미디', '애니', '오리지널']
        context = super().get_serializer_context()
        context['genre_list'] = genre_list
        context['sub_user_id'] = sub_user_id
        return context


class PreviewCellList(generics.ListAPIView):
    """Mobile-app preview API: ten random movies (GET).

    Fields: id, name, circle_image, logo_image_path, video_file,
    vertical_sample_video_file.
    """
    serializer_class = PreviewCellListSerializer

    def get_queryset(self):
        return Movie.objects.all().order_by('?')[:10]


# Movie registration endpoint — kept disabled, as in the original source.
# class MovieCerate(generics.CreateAPIView):
#     queryset = Movie.objects.all()
#     serializer_class = MovieSerializer


class GenreList(generics.ListAPIView):
    """List of movie genres (id, name). Requires the Authorization header."""
    queryset = Genre.objects.all()
    serializer_class = GenreListSerializer


class MovieListFirstGenre(generics.ListAPIView):
    """Movies for one genre: GET movie/genre/<kind>/list/.

    Excludes movies the profile disliked; returns up to 18 distinct movies.
    Required headers: Authorization, subuserid.
    """
    serializer_class = MovieListSerializer

    def get_queryset(self):
        kind = self.kwargs.get('kind')
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        if kind is None:
            # Fix: the original passed None to __icontains and crashed.
            return Movie.objects.none()
        queryset = Movie.objects.filter(genre__name__icontains=kind).exclude(
            like__sub_user_id=sub_user_id,
            like__like_or_dislike=2).distinct()[:18]
        return queryset


class MarkedList(generics.ListAPIView):
    """Movies bookmarked by the profile ("my list"): GET /movies/my_list/.

    Required headers: Authorization, subuserid.
    Fields: id, name, horizontal_image_path, vertical_image.
    """
    serializer_class = MarkedListSerializer

    def get_queryset(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.filter(like__sub_user=sub_user_id, like__marked=True)


class MovieDetail(generics.RetrieveAPIView):
    """Movie detail page: GET /movie/<id>.

    Required headers: Authorization, subuserid.
    Returns the full movie record plus per-profile fields: marked, like
    (0 = unrated, 1 = liked, 2 = disliked), total_minute, match_rate
    (currently random 70–97), to_be_continue, remaining_time, can_i_store.
    """
    queryset = Movie.objects.all()
    serializer_class = MovieDetailSerializer

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context['sub_user_id'] = self.request.META['HTTP_SUBUSERID']
        return context


class FollowUpMovies(generics.ListAPIView):
    """'Continue watching' list for the main screen: GET /movie/followup/.

    Required header: subuserid (int).
    Returns movies with the profile's paused position (to_be_continue).
    """
    serializer_class = MovieContinueSerializer

    def get_queryset(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return MovieContinue.objects.filter(sub_user_id=sub_user_id)


class MovieListByGenre(APIView):
    """Genre-filtered movie rows: GET /movies/list_by_genre/<genre_key>/.

    Builds one shuffled row per secondary genre (skipping rows with fewer
    than 3 movies) plus a final shuffled row for the requested genre itself.
    '외국' (foreign) is special-cased as "everything that is not 한국 (Korean)".
    Required header: subuserid (int). Movies the profile disliked are excluded.
    """

    def get(self, request, format=None, **kwargs):
        vertical_genre = self.kwargs['genre_key']
        sub_user = self.request.META['HTTP_SUBUSERID']
        genre_list = [
            '한국',
            '미국',
            '어린이',
            '액션',
            '스릴러',
            'sf',
            '판타지',
            '범죄',
            '호러',
            '다큐',
            '로맨스',
            '코미디',
            '애니',
            '외국',
        ]
        context = {}
        vertical_q = Q(genre__name__icontains=vertical_genre)
        for genre in genre_list:
            if vertical_genre == genre:
                continue
            horizontal_q = Q(genre__name__icontains=genre)
            if vertical_genre == '외국':
                queryset = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2) \
                    .exclude(genre__name__icontains='한국').filter(horizontal_q)
            elif genre == '외국':
                queryset = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2) \
                    .exclude(genre__name__icontains='한국').filter(vertical_q)
            else:
                queryset = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2) \
                    .filter(vertical_q).filter(horizontal_q)
            # Skip rows that would look empty in the UI.
            if queryset.count() < 3:
                continue
            serializer_data = MovieListByGenreSerializer(queryset.distinct(), many=True).data
            random.shuffle(serializer_data)
            context[f'{genre}'] = serializer_data
        # Fix: the original hard-coded like__sub_user=1 (a test leftover) in
        # the two queries below; use the requesting profile like everywhere else.
        if vertical_genre == '외국':
            vertical_queryset = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2) \
                .exclude(genre__name__icontains='한국').distinct()
        else:
            vertical_queryset = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2) \
                .filter(vertical_q).distinct()
        vertical_serializer_data = MovieListByGenreSerializer(vertical_queryset.order_by('?'), many=True).data
        random.shuffle(vertical_serializer_data)
        context[f'{vertical_genre}'] = vertical_serializer_data
        return Response(context)


class RecommendMovieAfterCreateSubUser(generics.ListAPIView):
    """Pool of 60 random movies shown right after profile creation, from which
    the user picks three favourites. Requires the Authorization header.

    TODO(review): ORDER BY '?' is slow on large tables; needs a better
    sampling strategy (the original carried a commented-out randint-based
    alternative, removed here as dead code).
    """
    serializer_class = MovieSerializer

    def get_queryset(self):
        return Movie.objects.all().order_by('?')[:60]


class AddLike(APIView):
    """Toggle "like" for a movie.

    POST body: movieid, subuserid (Authorization header required).
    Returns "좋아요 등록 성공" (registered) or "좋아요 취소 성공" (cancelled).
    """

    def post(self, request, *args, **kwargs):
        movie_id = request.data.get('movieid')
        sub_user_id = request.data.get('subuserid')
        sub_user = SubUser.objects.get(id=sub_user_id)
        movie = Movie.objects.get(id=movie_id)
        # NOTE(review): looking the row up via related names mirrors the
        # original; a direct movie_id/sub_user_id lookup would be simpler.
        obj, created = LikeDisLikeMarked.objects.update_or_create(
            movie__name=movie.name,
            sub_user__name=sub_user.name,
            defaults={'movie': Movie.objects.get(name=movie.name),
                      'sub_user': SubUser.objects.get(id=sub_user.id),
                      'updated': timezone.now(),
                      'movie_id': movie_id,
                      'sub_user_id': sub_user_id})
        if obj.like_or_dislike == 1:
            # Already liked -> cancel the like.
            obj.like_or_dislike = 0
            movie.like_count = F('like_count') - 1
            movie.save()
            obj.save()
            return JsonResponse({'response': "좋아요 취소 성공"}, status=201)
        if created or obj.like_or_dislike != 1:
            obj.like_or_dislike = 1
            movie.like_count = F('like_count') + 1
            movie.save()
            obj.save()
            return JsonResponse({'response': "좋아요 등록 성공"}, status=201)


class AddDisLike(APIView):
    """Toggle "dislike" for a movie.

    POST body: movieid, subuserid (Authorization header required).
    Returns "싫어요 등록 성공" (registered) or "싫어요 취소 성공" (cancelled).
    """

    def post(self, request, *args, **kwargs):
        movie_id = request.data.get('movieid')
        sub_user_id = request.data.get('subuserid')
        sub_user = SubUser.objects.get(id=sub_user_id)
        movie = Movie.objects.get(id=movie_id)
        obj, created = LikeDisLikeMarked.objects.update_or_create(
            movie__name=movie.name,
            sub_user__name=sub_user.name,
            defaults={'movie': Movie.objects.get(name=movie.name),
                      'sub_user': SubUser.objects.get(id=sub_user.id),
                      'updated': timezone.now(),
                      'movie_id': movie_id,
                      'sub_user_id': sub_user_id})
        if obj.like_or_dislike == 2:
            # Already disliked -> cancel the dislike.
            obj.like_or_dislike = 0
            movie.like_count = F('like_count') + 1
            movie.save()
            obj.save()
            return JsonResponse({'response': "싫어요 취소 성공"}, status=201)
        if created or obj.like_or_dislike != 2:
            obj.like_or_dislike = 2
            movie.like_count = F('like_count') - 1
            movie.save()
            obj.save()
            return JsonResponse({'response': "싫어요 등록 성공"}, status=201)


class MyList(APIView):
    """Toggle a movie in the profile's bookmark ("my list") set.

    POST body: movieid, subuserid (Authorization header required).
    Returns "찜목록 추가 성공" (added) or "찜목록 제거 성공" (removed).
    Existing like/dislike flags on the row are left untouched.
    """

    def post(self, request, *args, **kwargs):
        movie_id = request.data.get('movieid')
        sub_user_id = request.data.get('subuserid')
        sub_user = SubUser.objects.get(id=sub_user_id)
        movie = Movie.objects.get(id=movie_id)
        obj, created = LikeDisLikeMarked.objects.update_or_create(
            movie__name=movie.name,
            sub_user__name=sub_user.name,
            defaults={'movie': Movie.objects.get(name=movie.name),
                      'sub_user': SubUser.objects.get(id=sub_user.id),
                      'updated': timezone.now(),
                      'movie_id': movie_id,
                      'sub_user_id': sub_user_id})
        if created:
            obj.marked = True
            obj.save()
            return JsonResponse({'response': "찜목록 추가 성공"}, status=201)
        # Row already existed (e.g. holds a like/dislike): just flip `marked`.
        if obj.marked:
            obj.marked = False
            obj.save()
            return JsonResponse({'response': "찜목록 제거 성공"}, status=201)
        obj.marked = True
        obj.save()
        return JsonResponse({'response': "찜목록 추가 성공"}, status=201)


class BrandNewMovieList(generics.ListAPIView):
    """Ten most recently registered movies: GET /movies/brand_new/.

    Required header: subuserid (int). Disliked movies are excluded.
    """
    serializer_class = MovieListByGenreSerializer

    def get_queryset(self):
        sub_user = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.exclude(
            like__sub_user=sub_user, like__like_or_dislike=2
        ).order_by('-created')[:10]


class BigSizeVideo(generics.RetrieveAPIView):
    """Featured streaming box (large autoplaying video): GET /movies/big_size_video/.

    Required header: subuserid (int).
    Fields: id, name, video_file, logo_image_path, horizontal_image_path,
    marked (whether this profile bookmarked it).
    """
    serializer_class = BigSizeVideoSerializer

    def get_object(self):
        # TODO(review): the featured movie id is hard-coded; make configurable.
        movie_id = 354
        return Movie.objects.get(pk=movie_id)

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context['sub_user_id'] = self.request.META['HTTP_SUBUSERID']
        return context


class MostLikesMoives(generics.ListAPIView):
    """Top-10 movies by like count: GET /movies/most_likes/.

    Required header: subuserid (int). Disliked movies are excluded.
    (Class name typo "Moives" preserved — it is the public interface.)
    """
    serializer_class = MovieListByGenreSerializer

    def get_queryset(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.exclude(
            like__sub_user=sub_user_id, like__like_or_dislike=2
        ).order_by('-like_count')[:10]


class SavePausedVideoTime(APIView):
    """Save a profile's playback position: POST /movies/paused_time/.

    Body: sub_user_id (int), movie_id (int), paused_time ("HH:MM:SS" string).
    Returns {'saved': True} on success.
    """

    def post(self, *args, **kwargs):
        paused_time = self.request.data.get('paused_time')
        sub_user_id = self.request.data.get('sub_user_id')
        movie_id = self.request.data.get('movie_id')
        movie_obj = Movie.objects.get(pk=movie_id)
        sub_user_obj = SubUser.objects.get(pk=sub_user_id)
        # Reuse the existing progress row if present, else create one.
        movie = MovieContinue.objects.get_or_create(movie=movie_obj, sub_user=sub_user_obj)[0]
        movie.to_be_continue = paused_time
        movie.save()
        return Response({'saved': True})


class Search(APIView):
    """Search movies by name, genre, or actor: GET ?search_key=...

    Whitespace in the query is ignored: the key is matched with optional
    whitespace allowed between every character (case-insensitive regex).
    """

    def get(self, *args, **kwargs):  # fix: original signature had typo '*agrs'
        search_key = self.request.GET.get('search_key', None)
        if search_key:
            search_key = search_key.replace(" ", "")
            re_search_key = r'\s*'.join(search_key)
            # Fix: the original built the name queryset twice and unioned
            # both copies; once is enough. Debug print() calls removed.
            movies_by_name = Movie.objects.filter(name__iregex=re_search_key)
            movies_by_genre = Movie.objects.prefetch_related('genre').filter(
                genre__name__iregex=re_search_key)
            movies_by_actor = Movie.objects.prefetch_related('actors').filter(
                actors__name__iregex=re_search_key)
            queryset = (movies_by_name | movies_by_genre | movies_by_actor).distinct()
            queryset_serializer = MovieSerializer(queryset, many=True)
            return JsonResponse({'movie_list': queryset_serializer.data}, status=201)
        return JsonResponse({'search_error': False}, status=403)


class MatchRate(APIView):
    # NOTE(review): the source dump is truncated here ("..."). The visible
    # fragment is preserved verbatim. These class-level attributes execute a
    # DB query at import time (and hard-code sub_user 8), which is almost
    # certainly a bug in the original — confirm against the full file.
    sub_user_id = 8
    sub_user = SubUser.objects.get(pk=sub_user_id)
build_and_run.py
Source:build_and_run.py
1#!/usr/bin/env python32import os3import pickle4import re5import shutil6import socket7import sys8from copy import deepcopy9from glob import glob10from itertools import chain11from pathlib import Path12from subprocess import STDOUT, CalledProcessError, check_call, check_output13import requests14from kerncraft.incore_model import (15 asm_instrumentation,16 iaca_analyse_instrumented_binary,17 llvm_mca_analyse_instrumented_assembly,18 osaca_analyse_instrumented_assembly,19 parse_asm,20)21from kerncraft.models import benchmark22from osaca.osaca import reduce_to_section23# Scaling of inner dimension for 1D, 2D and 3D kernels24# * consider kernels to be compiled with multiple compilers and different options25# * find best performing run (min cy/it over all runs)26# * statistics on performance overall (cy/it over inner length)27# * validate that L2 traffic is neglegible28# * measure other performance metrics, such as port utilization (optionally)29# * scale to highlevel iterations30# Collect inner loop body assembly for each kernel/compiler/options combination31# * analyze with OSACA, IACA and LLVM-MCA32hosts_arch_map = {33 r"skylakesp2": "SKX",34 r"ivyep1": "IVB",35 r"naples1": "ZEN",36 r"rome1": "ZEN2",37 r"warmup": "TX2",38 r"qp4-node-[0-9]+": "A64FX",39}40arch_info = {41 "SKX": {42 "prepare": ["likwid-setFrequencies -f 2.4 -t 0".split()],43 "IACA": "SKX",44 "OSACA": "SKX",45 "LLVM-MCA": "-mcpu=skylake-avx512",46 "Ithemal": "skl",47 "isa": "x86",48 "perfevents": [],49 "cflags": {50 "icc": {51 "Ofast": (52 "-Ofast -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "53 "-ffreestanding -falign-loops"54 ).split(),55 "O3": (56 "-O3 -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "57 "-ffreestanding -falign-loops"58 ).split(),59 "O2": (60 "-O2 -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "61 "-ffreestanding -falign-loops"62 ).split(),63 "O1": (64 "-O1 -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "65 "-ffreestanding 
-falign-loops"66 ).split(),67 },68 "clang": {69 "Ofast": "-Ofast -march=skylake-avx512 -ffreestanding".split(),70 "O3": "-O3 -march=skylake-avx512 -ffreestanding".split(),71 "O2": "-O2 -march=skylake-avx512 -ffreestanding".split(),72 "O1": "-O1 -march=skylake-avx512 -ffreestanding".split(),73 },74 "gcc": {75 "Ofast": "-Ofast -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),76 "O3": "-O3 -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),77 "O2": "-O2 -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),78 "O1": "-O1 -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),79 },80 },81 },82 "IVB": {83 "prepare": ["likwid-setFrequencies -f 3.0 -t 0".split()],84 "IACA": "IVB",85 "OSACA": "IVB",86 "LLVM-MCA": "-mcpu=ivybridge",87 "Ithemal": "ivb",88 "isa": "x86",89 "perfevents": [],90 "cflags": {91 "icc": {92 "Ofast": (93 "-Ofast -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops"94 ).split(),95 "O3": "-O3 -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),96 "O2": "-O2 -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),97 "O1": "-O1 -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),98 },99 "clang": {100 "Ofast": "-Ofast -mavx -ffreestanding".split(),101 "O3": "-O3 -mavx -ffreestanding".split(),102 "O2": "-O2 -mavx -ffreestanding".split(),103 "O1": "-O1 -mavx -ffreestanding".split(),104 },105 "gcc": {106 "Ofast": "-Ofast -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),107 "O3": "-O3 -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),108 "O2": "-O2 -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),109 "O1": "-O1 -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),110 },111 },112 },113 "ZEN": {114 "prepare": ["likwid-setFrequencies -f 2.3 -t 0".split()],115 "IACA": None,116 "OSACA": "ZEN1",117 "LLVM-MCA": "-mcpu=znver1",118 "Ithemal": None,119 "isa": "x86",120 "perfevents": [],121 "cflags": {122 
"clang": {123 "Ofast": "-Ofast -march=znver1 -ffreestanding".split(),124 "O3": "-O3 -march=znver1 -ffreestanding".split(),125 "O2": "-O2 -march=znver1 -ffreestanding".split(),126 "O1": "-O1 -march=znver1 -ffreestanding".split(),127 },128 "gcc": {129 "Ofast": "-Ofast -march=znver1 -ffreestanding -falign-loops=16".split(),130 "O3": "-O3 -march=znver1 -ffreestanding -falign-loops=16".split(),131 "O2": "-O2 -march=znver1 -ffreestanding -falign-loops=16".split(),132 "O1": "-O1 -march=znver1 -ffreestanding -falign-loops=16".split(),133 },134 "icc": {135 "Ofast": (136 "-Ofast -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops"137 ).split(),138 "O3": "-O3 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),139 "O2": "-O2 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),140 "O1": "-O1 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),141 },142 },143 },144 "ZEN2": {145 "prepare": ["likwid-setFrequencies -f 2.35 -t 0".split()],146 "IACA": None,147 "OSACA": "ZEN2",148 "LLVM-MCA": "-mcpu=znver2",149 "Ithemal": None,150 "isa": "x86",151 "perfevents": [],152 "cflags": {153 "clang": {154 "Ofast": "-Ofast -march=znver2 -ffreestanding".split(),155 "O3": "-O3 -march=znver2 -ffreestanding".split(),156 "O2": "-O2 -march=znver2 -ffreestanding".split(),157 "O1": "-O1 -march=znver2 -ffreestanding".split(),158 },159 "gcc": {160 "Ofast": "-Ofast -march=znver2 -ffreestanding -falign-loops=16".split(),161 "O3": "-O3 -march=znver2 -ffreestanding -falign-loops=16".split(),162 "O2": "-O2 -march=znver2 -ffreestanding -falign-loops=16".split(),163 "O1": "-O1 -march=znver2 -ffreestanding -falign-loops=16".split(),164 },165 "icc": {166 "Ofast": (167 "-Ofast -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops"168 ).split(),169 "O3": "-O3 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),170 "O2": "-O2 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),171 "O1": "-O1 -xAVX2 -fno-alias 
-nolib-inline -ffreestanding -falign-loops".split(),172 },173 },174 },175 "TX2": {176 "Clock [MHz]": 2200, # reading out via perf. counters is not supported177 "IACA": None,178 "OSACA": "TX2",179 "assign_optimal_throughput": True,180 "LLVM-MCA": "-mcpu=thunderx2t99 -march=aarch64",181 "Ithemal": None,182 "isa": "aarch64",183 "perfevents": [],184 "cflags": {185 "clang": {186 "Ofast": "-Ofast -target aarch64-unknown-linux-gnu -ffreestanding".split(),187 "O3": "-O3 -target aarch64-unknown-linux-gnu -ffreestanding".split(),188 "O2": "-O2 -target aarch64-unknown-linux-gnu -ffreestanding".split(),189 "O1": "-O1 -target aarch64-unknown-linux-gnu -ffreestanding".split(),190 },191 "gcc": {192 "Ofast": "-Ofast -march=armv8.1-a -ffreestanding".split(),193 "O3": "-O3 -march=armv8.1-a -ffreestanding".split(),194 "O2": "-O2 -march=armv8.1-a -ffreestanding".split(),195 "O1": "-O1 -march=armv8.1-a -ffreestanding".split(),196 },197 },198 },199 "A64FX": {200 "Clock [MHz]": 1800, # reading out via perf. counters is not supported201 "L2_volume_metric": "L1<->L2 data volume [GBytes]",202 "IACA": None,203 "OSACA": "A64FX",204 "assign_optimal_throughput": False,205 "LLVM-MCA": "-mcpu=a64fx -march=aarch64",206 "Ithemal": None,207 "isa": "aarch64",208 "perfevents": [],209 "cflags": {210 "gcc": {211 "Ofast": "-Ofast -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),212 "O3": "-O3 -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),213 "O2": "-O2 -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),214 "O1": "-O1 -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),215 },216 "clang": {217 "Ofast": "-Ofast -target aarch64-unknown-linux-gnu -ffreestanding".split(),218 "O3": "-O3 -target aarch64-unknown-linux-gnu -ffreestanding".split(),219 "O2": "-O2 -target aarch64-unknown-linux-gnu -ffreestanding".split(),220 "O1": "-O1 -target aarch64-unknown-linux-gnu -ffreestanding".split(),221 },222 },223 },224}225def get_current_arch():226 
hostname = socket.gethostname()227 if hostname in hosts_arch_map:228 return hosts_arch_map[hostname]229 for matchstr, arch in hosts_arch_map.items():230 if re.match(matchstr, hostname):231 return arch232 # raise KeyError(f"{hostname} not matched in hosts_arch_map.")233 return None234def get_kernels(kernels=None):235 if kernels is None:236 kernels = []237 for f in glob("kernels/*.c"):238 f = f.rsplit(".", 1)[0].split("/", 1)[1]239 if f == "dummy":240 continue241 kernels.append(f)242 return kernels243# Columns:244# arch245# kernel246# compiler247# cflags_name248# element_size249# pointer_increment250# IACA_raw251# IACA_scaled [dict with cy/it]252# IACA_scaled_max [float with cy/it]253# OSACA_raw254# OSACA_scaled [dict with cy/it]255# OSACA_scaled_max [float with cy/it]256# LLVM-MCA_raw257# LLVM-MCA_scaled [dict with cy/it]258# LLVM-MCA_scaled_max [float with cy/it]259# best_length260# best_runtime [cy/it]261# L2_traffic [B/it]262# allruns [list (length, repetitions, cy/it, L2 B/it)]263# perfevents [dict event: counter/it]264def build_mark_run_all_kernels(measurements=True, osaca=True, iaca=True, llvm_mca=True):265 arch = get_current_arch()266 if arch is None:267 arches = arch_info.keys()268 islocal = False269 else:270 islocal = True271 arches = [arch]272 ainfo = arch_info.get(arch)273 if "prepare" in ainfo:274 for cmd in ainfo["prepare"]:275 check_call(cmd)276 for arch in arches:277 ainfo = arch_info.get(arch)278 print(arch)279 data_path = Path(f"build/{arch}/data.pkl")280 if data_path.exists():281 with data_path.open("rb") as f:282 data = pickle.load(f)283 else:284 data = []285 data_lastsaved = deepcopy(data)286 for compiler, compiler_cflags in ainfo["cflags"].items():287 if not shutil.which(compiler) and islocal:288 print(compiler, "not found in path! 
Skipping...")289 continue290 for cflags_name, cflags in compiler_cflags.items():291 for kernel in get_kernels():292 print(293 f"{kernel:<15} {arch:>5} {compiler:>5} {cflags_name:>6}",294 end=": ",295 flush=True,296 )297 row = list(298 [299 r300 for r in data301 if r["arch"] == arch302 and r["kernel"] == kernel303 and r["compiler"] == compiler304 and r["cflags_name"] == cflags_name305 ]306 )307 if row:308 row = row[0]309 else:310 row = {311 "arch": arch,312 "kernel": kernel,313 "compiler": compiler,314 "cflags_name": cflags_name,315 "element_size": 8,316 }317 data.append(row)318 # Build319 print("build", end="", flush=True)320 asm_path, exec_path, overwrite = build_kernel(321 kernel,322 arch,323 compiler,324 cflags,325 cflags_name,326 dontbuild=not islocal,327 )328 if overwrite:329 # clear all measurment information330 row["best_length"] = None331 row["best_runtime"] = None332 row["L2_traffic"] = None333 row["allruns"] = None334 row["perfevents"] = None335 # Mark for IACA, OSACA and LLVM-MCA336 print("mark", end="", flush=True)337 try:338 (339 marked_asmfile,340 marked_objfile,341 row["pointer_increment"],342 overwrite,343 ) = mark(344 asm_path,345 compiler,346 cflags,347 isa=ainfo["isa"],348 overwrite=overwrite,349 )350 row["marking_error"] = None351 except ValueError as e:352 row["marking_error"] = str(e)353 print(":", e)354 continue355 if overwrite:356 # clear all model generated information357 for model in ["IACA", "OSACA", "LLVM-MCA", "Ithemal"]:358 for k in [359 "ports",360 "prediction",361 "throughput",362 "cp",363 "lcd",364 "raw",365 ]:366 row[model + "_" + k] = None367 for model in ["IACA", "OSACA", "LLVM-MCA", "Ithemal"]:368 for k in [369 "ports",370 "prediction",371 "throughput",372 "cp",373 "lcd",374 "raw",375 ]:376 if model + "_" + k not in row:377 row[model + "_" + k] = None378 # Analyze with IACA, if requested and configured379 if iaca and ainfo["IACA"] is not None:380 print("IACA", end="", flush=True)381 if not row.get("IACA_ports"):382 
row["IACA_raw"] = iaca_analyse_instrumented_binary(383 marked_objfile, micro_architecture=ainfo["IACA"]384 )385 row["IACA_ports"] = {386 k: v / (row["pointer_increment"] / row["element_size"])387 for k, v in row["IACA_raw"]["port cycles"].items()388 }389 row["IACA_prediction"] = row["IACA_raw"]["throughput"] / (390 row["pointer_increment"] / row["element_size"]391 )392 row["IACA_throughput"] = max(row["IACA_ports"].values())393 print(". ", end="", flush=True)394 else:395 print("! ", end="", flush=True)396 # Analyze with OSACA, if requested397 if osaca:398 print("OSACA", end="", flush=True)399 if not row.get("OSACA_ports"):400 row["OSACA_raw"] = osaca_analyse_instrumented_assembly(401 marked_asmfile,402 micro_architecture=ainfo["OSACA"],403 assign_optimal_throughput=ainfo.get(404 "assign_optimal_throughput", True405 ),406 )407 row["OSACA_ports"] = {408 k: v / (row["pointer_increment"] / row["element_size"])409 for k, v in row["OSACA_raw"]["port cycles"].items()410 }411 row["OSACA_prediction"] = row["OSACA_raw"]["throughput"] / (412 row["pointer_increment"] / row["element_size"]413 )414 row["OSACA_throughput"] = max(row["OSACA_ports"].values())415 row["OSACA_cp"] = row["OSACA_raw"]["cp_latency"] / (416 row["pointer_increment"] / row["element_size"]417 )418 row["OSACA_lcd"] = row["OSACA_raw"]["lcd"] / (419 row["pointer_increment"] / row["element_size"]420 )421 print(". ", end="", flush=True)422 else:423 print("! 
", end="", flush=True)424 # Analyze with LLVM-MCA, if requested and configured425 if llvm_mca and ainfo["LLVM-MCA"] is not None:426 print("LLVM-MCA", end="", flush=True)427 if not row.get("LLVM-MCA_ports"):428 row["LLVM-MCA_raw"] = llvm_mca_analyse_instrumented_assembly(429 marked_asmfile,430 micro_architecture=ainfo["LLVM-MCA"],431 isa=ainfo["isa"],432 )433 row["LLVM-MCA_ports"] = {434 k: v / (row["pointer_increment"] / row["element_size"])435 for k, v in row["LLVM-MCA_raw"]["port cycles"].items()436 }437 row["LLVM-MCA_prediction"] = row["LLVM-MCA_raw"]["throughput"] / (438 row["pointer_increment"] / row["element_size"]439 )440 row["LLVM-MCA_throughput"] = max(row["LLVM-MCA_ports"].values())441 row["LLVM-MCA_cp"] = row["LLVM-MCA_raw"]["cp_latency"] / (442 row["pointer_increment"] / row["element_size"]443 )444 row["LLVM-MCA_lcd"] = row["LLVM-MCA_raw"]["lcd"] / (445 row["pointer_increment"] / row["element_size"]446 )447 print(". ", end="", flush=True)448 else:449 print("! ", end="", flush=True)450 # Analyze with Ithemal, if not running local and configured451 if ainfo["Ithemal"] is not None and not islocal:452 print("Ithemal", end="", flush=True)453 if not row.get("Ithemal_prediction"):454 with open(marked_asmfile) as f:455 parsed_code = parse_asm(f.read(), ainfo["isa"])456 kernel = reduce_to_section(parsed_code, ainfo["isa"])457 row["Ithemal_prediction"] = get_ithemal_prediction(458 get_intel_style_code(marked_objfile),459 model=ainfo["Ithemal"],460 )461 print(". ", end="", flush=True)462 else:463 print("! ", end="", flush=True)464 if measurements and islocal:465 # run measurements if on same hardware466 print("scale", end="", flush=True)467 if not row.get("allruns"):468 # find best length with concurrent L2 measurement469 scaling_runs, best = scalingrun(exec_path)470 row["best_length"] = best[0]471 row["best_runtime"] = best[2]472 row["L2_traffic"] = best[3]473 row["allruns"] = scaling_runs474 print(f"({best[0]}). 
", end="", flush=True)475 else:476 print(477 f"({row.get('best_length', None)})! ",478 end="",479 flush=True,480 )481 print()482 # dump to file483 if data != data_lastsaved:484 print("saving... ", end="", flush=True)485 with data_path.open("wb") as f:486 try:487 pickle.dump(data, f)488 data_lastsaved = deepcopy(data)489 print("saved!")490 except KeyboardInterrupt:491 f.seek(0)492 pickle.dump(data, f)493 print("saved!")494 sys.exit()495def scalingrun(kernel_exec, total_iterations=25000000, lengths=range(8, 1 * 1024 + 1)):496 # print('{:>8} {:>10} {:>10}'.format("x", "cy/it", "L2 B/it"))497 parameters = chain(*[[total_iterations // i, i] for i in lengths])498 # TODO use arch specific events and grooup499 r, o = perfctr(chain([kernel_exec], map(str, parameters)), 1, group="L2")500 global_infos = {}501 for m in [re.match(r"(:?([a-z_\-0-9]+):)?([a-z]+): ([a-z\_\-0-9]+)", line) for line in o]:502 if m is not None:503 try:504 v = int(m.group(4))505 except ValueError:506 v = m.group(4)507 if m.group(1) is None:508 global_infos[m.group(3)] = v509 else:510 r[m.group(2)][m.group(3)] = v511 results = []512 best = (float("inf"), None)513 for markername, mmetrics in r.items():514 kernelname, repetitions, *_, xlength = markername.split("_")515 repetitions = int(repetitions)516 xlength = int(xlength)517 total_iterations = mmetrics["repetitions"] * mmetrics["iterations"]518 if "Clock [MHz]" in mmetrics:519 clock_hz = mmetrics["Clock [MHz]"] * 1e6520 else:521 clock_hz = arch_info[get_current_arch()]["Clock [MHz]"] * 1e6522 cyperit = mmetrics["Runtime (RDTSC) [s]"] * clock_hz / total_iterations523 # TODO use arch specific events and grooup524 if "L2D load data volume [GBytes]" in mmetrics:525 l2perit = (526 (527 mmetrics["L2D load data volume [GBytes]"]528 + mmetrics.get("L2D evict data volume [GBytes]", 0)529 )530 * 1e9531 / total_iterations532 )533 else:534 l2perit = (535 mmetrics[arch_info[get_current_arch()]["L2_volume_metric"]]536 * 1e9537 / total_iterations538 )539 
# --- tail of scalingrun(); its `def` line lies on an earlier line of this excerpt ---
        results.append((xlength, repetitions, cyperit, l2perit))
        if cyperit < best[0]:
            best = cyperit, results[-1]
    # best[1] is the (xlength, repetitions, cy/it, L2 B/it) tuple of the run
    # with the lowest cycles-per-iteration
    return results, best[1]


def mark(asm_path, compiler, cflags, isa, overwrite=False):
    """Mark assembly for IACA, OSACA and LLVM-MCA analysis.

    Returns ``(marked_asm, marked_obj, pointer_increment, overwrite)``:
    the marked assembly file path, the object file compiled from it, the
    byte increment of the loop pointer per kernel iteration, and whether
    files were (re)generated.
    """
    marked_asm_path = Path(asm_path).with_suffix(".marked.s")
    if not marked_asm_path.exists() or overwrite:
        overwrite = True
        with open(asm_path) as fa, open(marked_asm_path, "w") as fm:
            try:
                _, pointer_increment = asm_instrumentation(fa, fm, isa=isa)
            except KeyboardInterrupt:
                # Drop the partially written file so the next run starts clean.
                # NOTE(review): the interrupt is swallowed and
                # pointer_increment stays unbound on this path, so the final
                # return would raise NameError -- confirm this is intended.
                fm.close()
                marked_asm_path.unlink()
        print(". ", end="", flush=True)
    else:
        # Reuse marked assembly: extract pointer_increment from its text.
        with open(marked_asm_path) as f:
            marked_asm = f.read()
        m = re.search(r"pointer_increment=([0-9]+)", marked_asm)
        if m:
            pointer_increment = int(m.group(1))
        else:
            os.unlink(marked_asm_path)
            raise ValueError(
                "Could not find `pointer_increment=<byte increment>`. Plase place into file."
            )
        print("! ", end="", flush=True)
    # Compile marked assembly to object for IACA
    marked_obj = Path(asm_path).with_suffix(".marked.o")
    if not marked_obj.exists():
        check_call([compiler] + ["-c", str(marked_asm_path), "-o", str(marked_obj)])
    return str(marked_asm_path), str(marked_obj), pointer_increment, overwrite


def build_kernel(
    kernel,
    architecture,
    compiler,
    cflags,
    cflags_name,
    overwrite=False,
    dontbuild=False,
):
    """Compile *kernel* into assembly, object and a likwid-linked executable.

    Artifacts go to ``build/<architecture>/<compiler>/<cflags_name>/``.
    Returns ``(kernel_assembly, executable, overwrite)``; *overwrite* is True
    when anything was (re)built. Raises ValueError if a build is needed but
    *dontbuild* is set.
    """
    build_path = f"build/{architecture}/{compiler}/{cflags_name}"
    kernel_assembly = f"{build_path}/{kernel}.s"
    kernel_object = f"{build_path}/{kernel}.o"
    executable = f"{build_path}/{kernel}"
    Path(build_path).mkdir(parents=True, exist_ok=True)
    if not overwrite:
        # Overwrite if any kernel specific file is missing
        overwrite = (
            not os.path.exists(kernel_object)
            or not os.path.exists(kernel_assembly)
            or not os.path.exists(executable)
        )
    if dontbuild and overwrite:
        raise ValueError("Must build, but not allowed.")
    if not Path(f"{build_path}/dummy.o").exists():
        check_call([compiler] + cflags + ["-c", "kernels/dummy.c", "-o", f"{build_path}/dummy.o"])
    if not Path(f"{build_path}/compiler_version").exists():
        # Document compiler version
        with open(f"{build_path}/compiler_version", "w") as f:
            f.write(check_output([compiler, "-v"], encoding="utf8", stderr=STDOUT))
    if overwrite:
        # build object + assembly
        check_call([compiler] + cflags + ["-c", f"kernels/{kernel}.c", "-o", kernel_object])
        check_call(
            [compiler] + cflags + ["-c", f"kernels/{kernel}.c", "-S", "-o", kernel_assembly]
        )
        # build main and link executable (flags come from the environment,
        # so LIKWID_DEFINES/LIKWID_INC/LIKWID_LIB must be set)
        executable_cflags = [
            os.environ["LIKWID_DEFINES"],
            os.environ["LIKWID_INC"],
            os.environ["LIKWID_LIB"],
        ] + ["-Ofast"]
        check_call(
            [compiler]
            + executable_cflags
            + [
                f"{build_path}/dummy.o",
                kernel_object,
                "-DMAIN",
                f"kernels/{kernel}.c",
                "-llikwid",
                "-o",
                executable,
            ]
        )
        print(". ", end="", flush=True)
    else:
        print("! ", end="", flush=True)
    return kernel_assembly, executable, overwrite


def perfctr(cmd, cores, group="MEM", code_markers=True, verbose=0):
    """
    Run *cmd* with likwid-perfctr and returns result as dict.

    *group* may be a performance group known to likwid-perfctr or an event string.
    if CLI argument cores > 1, running with multi-core, otherwise single-core

    Returns ``(results, output)`` where *results* maps marker-region names to
    {metric/event: value} dicts and *output* is the raw CSV output lines.
    """
    # Making sure likwid-perfctr is available:
    if benchmark.find_executable("likwid-perfctr") is None:
        print(
            "likwid-perfctr was not found. Make sure likwid is installed and found in PATH.",
            file=sys.stderr,
        )
        sys.exit(1)
    # FIXME currently only single core measurements support!
    perf_cmd = ["likwid-perfctr", "-f", "-O", "-g", group]
    cpu = "S0:0"
    if cores > 1:
        cpu += "-" + str(cores - 1)
    # Pinned and measured on cpu
    perf_cmd += ["-C", cpu]
    # code must be marked using likwid markers
    perf_cmd.append("-m")
    perf_cmd += cmd
    if verbose > 1:
        print(" ".join(perf_cmd))
    try:
        with benchmark.fix_env_variable("OMP_NUM_THREADS", None):
            output = check_output(perf_cmd).decode("utf-8").split("\n")
    except CalledProcessError as e:
        print("Executing benchmark failed: {!s}".format(e), file=sys.stderr)
        sys.exit(1)
    # TODO multicore output is different and needs to be considered here!
    results = {}
    cur_region_name = None
    cur_region_data = {}
    for line in output:
        # "STRUCT,Info,3" separates marker regions in likwid's CSV output
        if line == "STRUCT,Info,3" and cur_region_name is not None:
            results[cur_region_name] = cur_region_data
            cur_region_name = None
            cur_region_data = {}
        m = re.match(r"TABLE,Region ([a-z\-0-9_]+),", line)
        if m:
            cur_region_name = m.group(1)
        line = line.split(",")
        try:
            # Metrics
            cur_region_data[line[0]] = float(line[1])
            continue
        except ValueError:
            # Would not convert to float
            pass
        except IndexError:
            # Not a parseable line (did not contain any commas)
            continue
        try:
            # Event counters ("-"/"nan" counts are treated as zero)
            if line[2] == "-" or line[2] == "nan":
                counter_value = 0
            else:
                counter_value = int(line[2])
            if re.fullmatch(r"[A-Z0-9_]+", line[0]) and re.fullmatch(
                r"[A-Z0-9]+(:[A-Z0-9]+=[0-9A-Fa-fx]+)*", line[1]
            ):
                cur_region_data.setdefault(line[0], {})
                cur_region_data[line[0]][line[1]] = counter_value
                continue
        except (IndexError, ValueError):
            pass
        if line[0].endswith(":") and len(line) == 3 and line[2] == "":
            # CPU information strings
            cur_region_data[line[0]] = line[1]
            continue
    # NOTE(review): if no region header was seen, this stores under key None
    # -- confirm that is acceptable for callers.
    results[cur_region_name] = cur_region_data
    return results, output


def remove_html_tags(text):
    """Strip all HTML tags from *text* (non-greedy ``<...>`` removal)."""
    return re.sub("<.*?>", "", text)


def get_intel_style_code(marked_objfile):
    """Disassemble *marked_objfile* with Intel syntax and return the kernel body.

    The kernel is the text between the ``mov ebx, 111; nop`` start marker and
    the ``mov ebx, 222; nop`` end marker; the trailing label and jump lines
    are dropped.
    """
    # NOTE(review): --no-leading-addr/--x86-asm-syntax=intel suggest this
    # targets llvm-objdump rather than GNU objdump -- confirm.
    cmd = (
        "objdump -d --demangle --no-leading-addr --no-leading-headers --no-show-raw-insn "
        "--x86-asm-syntax=intel"
    ).split(" ") + [marked_objfile]
    asm_raw = check_output(cmd).decode()
    asm_raw = "\n".join([line.strip() for line in asm_raw.split("\n")])
    kernel_raw = asm_raw[
        asm_raw.index("mov\tebx, 111\nnop")
        + len("mov\tebx, 111\nnop") : asm_raw.index("mov\tebx, 222\nnop")
    ]
    kernel_lines = kernel_raw.split("\n")
    # Ignore label and jump
    return "\n".join(kernel_lines[:-2])


def get_ithemal_prediction(code, model="skl"):
    """Query the remote Ithemal service for a cycles-per-iteration prediction.

    *model* selects the microarchitecture model (skl/hsw/ivb). Returns NaN if
    the service could not produce a prediction.
    """
    # NOTE(review): hard-coded service IP; availability is not guaranteed.
    url = "http://3.18.198.23/predict"
    assert model in ["skl", "hsw", "ivb"]
    r = requests.post(url, {"code": code, "model": model})
    raw_text = remove_html_tags(r.text)
    m = re.search("Could not generate a prediction: (.*)", raw_text)
    if m:
        print(" error:", m.group(1).strip(), end=" ")
        return float("nan")
    m = re.search("Prediction: ([0-9.]+) cycles per iteration", raw_text)
    if m:
        return float(m.group(1))
    else:
        return float("nan")


def main():
    """Entry point: detect LLVM-MCA 12.0.0 and run the full kernel sweep."""
    # Check for correct LLVM-MCA version
    try:
        llvm_mca = "LLVM version 12.0.0" in check_output(["llvm-mca", "-version"]).decode()
    except FileNotFoundError:
        llvm_mca = False
    build_mark_run_all_kernels(measurements="--no-measurements" not in sys.argv, llvm_mca=llvm_mca)
    sys.exit()


if __name__ == "__main__":
    # NOTE(review): guard body elided in this excerpt (presumably `main()`).
    ...
main.py
Source:main.py
'''
Demo helper classes that render entity-linking and relation-discovery
results as annotated HTML for a single page of raw text.

NOTE(review): this module uses ``xrange`` and ``str.decode`` -- it is
Python 2 code.
'''
import json
from collections import deque


class DemoEntityLinking(object):
    """Render entity-linking results for the page slice [beg, end) of *input*.

    *result* carries parallel lists: entity ids ('idx'), types ('type'),
    character spans ('spans'), plus an 'entities' dict with per-entity
    MeSH/ChEBI/Wikipedia data.
    """

    def __init__(self, span, input=None, result=None):
        self.raw = input
        self.beg, self.end = span
        if result:
            self.entities = result['entities']
            self.entity_idxs = result['idx']
            self.entity_types = result['type']
            self.entity_spans = result['spans']
        # sentinel id for entities with no knowledge-base entry
        self.__bad_entity = -1
        # per-type (new, total) counts filled by do_stat()
        self.statistics = {}

    def decorate_entity(self, html_id, idx, entity_text):
        """Wrap *entity_text* in a <span> carrying id, newness and type classes."""
        eidx = self.entity_idxs[idx]
        e_type = self.entity_types[idx].lower()
        html_id = 'e_%d' % html_id
        is_new_entity = self._is_new_entity(idx)
        # if e_type == 'per':
        #     print 'hehe:', is_new_entity
        # new, total = self.statistics.get(e_type, (0, 0))
        sup = eidx
        if is_new_entity:
            # new += 1
            if eidx == -1:
                # unknown entity: show no superscript id
                sup = ''
            e_class = 'marked_span newe'
        else:
            e_class = 'marked_span olde'
            # total += 1
        # self.statistics.update({e_type: (new, total)})
        decorated_text = '<span id="%s" class="%s et_%s">%s<sup>%s</sup></span>' \
            % (html_id, e_class, e_type, entity_text, sup)
        return decorated_text

    def _format_desc(self, desc, name, id, type):
        """Format one knowledge-base description; omit name/id when missing."""
        if not name or not id:
            return ('<span class="%s">%s'
                    '[<b>%s</b>]</span>') % (type.lower(), desc, type)
        return ('<span class="%s">%s'
                '[<i>%s</i> (%s), <b>%s</b>]. </span>') % (type.lower(), desc, name, id, type)

    def build_entity_infobox(self, html_id, eidx):
        """Build the pop-up <div> for entity *eidx*: descriptions, synonym
        lists and serialized MeSH/wiki trees. Empty sections for unknown
        entities (eidx == -1)."""
        box_id = 'bub_%d' % html_id
        infobox = '<div id="%s" class="bub_div">' % box_id
        # build mesh link
        hint = 'Click to edit.'
        chebi_desc_info = ''
        mesh_desc_info = ''
        wiki_desc_info = ''
        # chebi_syno_info = self._build_list_group([hint])
        # mesh_syno_info = self._build_list_group([hint])
        # wiki_link_info = self._build_list_group([hint])
        chebi_syno_info = ''
        mesh_syno_info = ''
        wiki_link_info = ''
        tree_info = {'mesh': [], 'wiki': []}
        tree_info = json.dumps(tree_info)
        if eidx != self.__bad_entity:
            entity = self.entities[str(eidx)]
            mesh_id = entity.get('mesh-id', None)
            chebi_id = entity.get('chebi-id', None)
            if chebi_id:
                chebi_name = entity['chebi-name']
                chebi_desc = entity['chebi-description']
                chebi_desc_info = self._format_desc(chebi_desc, chebi_name, chebi_id, 'ChEBI')
                chebi_syno_info = self._build_list_group(entity['chebi-synonyms'])
            if mesh_id:
                mesh_name = entity['mesh-name']
                mesh_desc = entity['mesh-description']
                mesh_desc_info = self._format_desc(mesh_desc, mesh_name, mesh_id, 'MeSH')
                if entity['mesh-synonyms']:
                    mesh_syno_info = self._build_list_group(entity['mesh-synonyms'])
                # building tree: one subtree per MeSH tree number of the entity
                mesh_tn = entity['mesh-tn']
                tree_info = []
                for tn in mesh_tn:
                    mesh_parents = entity['mesh-parents'][tn]
                    leaf = [[mesh_id, mesh_name, tn]]
                    subtree = self._build_tree(leaf, mesh_parents)
                    tree_info += subtree
                # leafs.append([mesh_id, mesh_name, tn])
                # entity_node = [[mesh_id, mesh_name, mesh_tn]]
                # tree_info = self._build_tree(leafs, mesh_parents)
            # extra tree information (from wikipedia is-a rule)
            extra_parents = entity.get('extra-parent', [])
            extra_trees = []
            if extra_parents:
                for e_parents in extra_parents:
                    if e_parents:
                        extra_tree = self._build_tree([], e_parents)
                        head = extra_tree[0]['text']
                        extra_tree[0].update({'text': head})
                        extra_trees += extra_tree
            # tree_info += extra_trees
            tree_info = {'mesh': tree_info, 'wiki': extra_trees}
            tree_info = json.dumps(tree_info)
            # build wikipedia link
            # print eidx
            wiki_id = entity.get('wid', None)
            if wiki_id:
                wiki_title = entity.get('wiki-title', None)
                if wiki_title:
                    wiki_url = 'https://en.wikipedia.org/w/index.php?curid=%s' % wiki_id
                    wiki_desc = entity.get('wiki-text', None)
                    wiki_name = '<a href="%s"><i>%s</i></a>' % (wiki_url, wiki_title)
                    wiki_desc_info = self._format_desc(wiki_desc, wiki_name, wiki_id, 'Wikipedia')
                wiki_link_list = []
                for l in entity['wiki-links']:
                    s = '_'.join(l.split())
                    wiki_link_list.append('https://en.wikipedia.org/wiki/%s' % s)
                wiki_link_info = self._build_list_group(entity['wiki-links'], wiki_link_list)
        # wiki_link_info = '<div id="wiki">%s</div>' % wiki_link_info
        # syno_info = '<div id="mesh">%s</div>' % syno_info
        mesh_syno_info = '<div id="mesh">%s</div>' % mesh_syno_info
        chebi_syno_info = '<div id="chebi">%s</div>' % chebi_syno_info
        wiki_link_info = '<div id="wiki">%s</div>' % wiki_link_info
        all_desc_info = '%s%s%s' % (mesh_desc_info, chebi_desc_info, wiki_desc_info)
        if all_desc_info == '':
            all_desc_info = '<span>Click to edit</span>'
        all_desc_info = ('<div id="edesc"><p class="editable" data-type="textarea">'
                        '%s</p><hr></div>') % (all_desc_info,)
        all_syno_info = '<div id="esyno">%s%s%s</div>' % (mesh_syno_info, chebi_syno_info, wiki_link_info)
        tree_info = '<div id="etree">%s<hr></div>' % tree_info
        infobox += all_desc_info + all_syno_info + tree_info + '</div>'
        return infobox

    def _build_list_group(self, text, link=None):
        """Render *text* items as a Bootstrap list-group; anchors when *link*
        provides a parallel list of URLs, plain <li> items otherwise."""
        links = ''
        for i, t in enumerate(text):
            if not link:
                href = '<li class="list-group-item">%s</li>' % t
            else:
                href = '<a class="list-group-item" href="%s">%s</a>' % (link[i], t)
            links += href
        if not link:
            list_group = '<ul class="list-group">%s</ul>' % links
        else:
            list_group = '<div class="list-group">%s</div>' % links
        return list_group

    def _build_tree(self, leafs, nodes):
        """Assemble [id, name, tree-number] records into nested tree dicts.

        Tree numbers are dot-separated paths ('A.B.C'); children are folded
        into parents bottom-up. Leaf nodes get the red color, roots the blue.
        Returns the list of root node dicts.
        """
        # nodes.append(entity)
        all_nodes = nodes + leafs
        fake_tree = {}
        tree_nodes = {}
        for node in all_nodes:
            fake_tree.update({node[2]: []})
            real_node = {'text': node[1],
                         'href': '#%s (%s)' % (node[2], node[0]),
                         'nodes': []}
            if node in leafs:
                real_node = {'text': node[1],
                             'href': '#%s (%s)' % (node[2], node[0]),
                             'color': '#D9534F'}
            tree_nodes.update({node[2]: real_node})
        roots = []
        for node in all_nodes:
            steps = node[2].split('.')
            if len(steps) == 1:
                roots.append(node[2])
                continue
            parent_key = '.'.join(steps[0:-1])
            children = fake_tree.get(parent_key, [])
            children.append(node[2])
        # fold childless nodes into their parents, bottom-up (BFS via deque)
        leafs = deque()
        for root, children in fake_tree.items():
            if not len(children):
                leafs.append(root)
        while len(leafs):
            leaf = leafs.popleft()
            steps = leaf.split('.')
            if len(steps) == 1:
                continue
            parent_key = '.'.join(steps[0:-1])
            leaf_node = tree_nodes[leaf]
            children = tree_nodes[parent_key]['nodes']
            children.append(leaf_node)
            tree_nodes[parent_key].update({'nodes': children})
            fake_tree[parent_key].remove(leaf)
            if not len(fake_tree[parent_key]):
                leafs.append(parent_key)
        tree_info = []
        for root in roots:
            tree_nodes[root].update({'color': '#489cdf'})
            tree_info.append(tree_nodes[root])
        return tree_info

    def _is_span_inpage(self, w_beg, w_end):
        """True if the span's start offset falls inside this page's window."""
        if w_beg >= self.beg and w_beg < self.end:
            return True
        return False

    def _is_new_entity(self, i):
        """An entity is 'new' when it has no id or no MeSH/ChEBI link."""
        if self.entity_idxs[i] == self.__bad_entity:
            return True
        else:
            eidx = str(self.entity_idxs[i])
            mesh_id = self.entities[eidx].get('mesh-id', None)
            chebi_id = self.entities[eidx].get('chebi-id', None)
            has_mesh_link = False
            has_chebi_link = False
            if mesh_id: has_mesh_link = True
            if chebi_id: has_chebi_link = True
            if not has_mesh_link and not has_chebi_link:
                return True
        return False

    def do_stat(self):
        """Accumulate per-type (new, total) counts into self.statistics."""
        total_len = len(self.entity_idxs)
        for i in xrange(total_len):
            is_new_entity = self._is_new_entity(i)
            e_type = self.entity_types[i].lower()
            new, total = self.statistics.get(e_type, (0, 0))
            if is_new_entity: new += 1
            total += 1
            self.statistics.update({e_type: (new, total)})
        return self.statistics

    def do_demo(self):
        """Return the page text with every in-page entity wrapped in decorated
        spans followed by its infobox <div>; GPE-typed entities are skipped."""
        demo_text = ''
        pointer = 0
        html_id = 0
        for i, span in enumerate(self.entity_spans):
            if not self._is_span_inpage(*span):
                continue
            # filter...
            if self.entity_types[i].lower() == 'gpe':
                continue
            # is_new_entity = self._is_new_entity(i)
            # convert document offsets to page-local offsets
            entity_beg = span[0] - self.beg
            entity_end = span[1] - self.beg
            demo_text += self.raw[pointer : entity_beg]
            entity_text = self.raw[entity_beg : entity_end + 1]
            # if span[0] == 4120:
            #     print 'from main:', entity_text
            decorated_text = self.decorate_entity(html_id, i,
                                                  entity_text)
            demo_text += decorated_text
            infobox_text = self.build_entity_infobox(html_id, self.entity_idxs[i])
            if isinstance(infobox_text, str):
                infobox_text = infobox_text.decode('utf-8')
            demo_text += infobox_text
            pointer = entity_end + 1
            html_id += 1
        demo_text += self.raw[pointer:]
        # html_id = 0
        # for eidx, span in zip(self.entity_idxs, self.entity_spans):
        #     if not self._is_span_inpage(*span):
        #         continue
        #     is_new_entity = self._is_new_entity()
        #     infobox_text = self.build_entity_infobox(html_id, str(eidx))
        #     # print type(infobox_text)
        #     demo_text += infobox_text.encode('utf-8')
        #     html_id += 1
        return demo_text


class DemoRelatinDiscovery(object):
    """Render chemical-disease relation sentences for one page of raw text.

    NOTE(review): class name 'Relatin' (sic) kept -- renaming would break
    callers. Class continues on later lines of this excerpt.
    """

    def __init__(self, span, input=None, result=None):
        self.beg, self.end = span
        self.raw = input
        self.sf_sents = result

    def _is_span_inpage(self, w_beg, w_end):
        """True if the whole span lies inside this page's window."""
        if w_beg >= self.beg and w_end < self.end:
            return True
        return False

    def decorate_sent(self, idx, spans, sent_text):
        """Wrap subject/object spans of *sent_text* in marker <span>s."""
        subj_span, obj_span = spans
        sx, sy = subj_span
        ox, oy = obj_span
        # print subj_span,
        # obj_span  (continuation of the comment split across the excerpt)
        subj_text = sent_text[sx: sy]
        obj_text = sent_text[ox: oy]
        subj_text = '<span id="subj_%s" class="%s">%s</span>' % (idx, 'marked_subj', subj_text)
        obj_text = '<span id="obj_%s" class="%s">%s</span>' % (idx, 'marked_obj', obj_text)
        # re-assemble the sentence with both spans wrapped, whichever comes first
        if sx < ox:
            demo_sent = sent_text[:sx]
            demo_sent += subj_text + sent_text[sy:ox]
            demo_sent += obj_text + sent_text[oy:]
        else:
            demo_sent = sent_text[:ox]
            demo_sent += subj_text + sent_text[oy:sx]
            demo_sent += obj_text + sent_text[sy:]
        demo_sent = '<span id="sf_sent_%s" class="%s">%s</span>' % (idx, 'marked_sf_sent', demo_sent)
        return demo_sent

    def _correct_span(self, span):
        """Convert a document-level span to page-local offsets."""
        return (span[0] - self.beg, span[1] - self.beg)

    def do_demo(self):
        """Return the page text with every in-page relation sentence wrapped
        in decorated subject/object/sentence spans."""
        demo_sf_text = ''
        pointer = 0
        idx = 0
        for sent in self.sf_sents:
            sent_span = sent['sentences']
            if not self._is_span_inpage(*sent_span):
                continue
            subj_span = self._correct_span(sent['chemical'])
            obj_span = self._correct_span(sent['disease'])
            sent_span = self._correct_span(sent_span)
            b, e = sent_span
            demo_sf_text += self.raw[pointer: b]
            sent_text = self.raw[b: e]
            # print 'hehe:', sent_text
            # make subject/object offsets sentence-local
            xs, ys = subj_span
            xo, yo = obj_span
            subj_span = xs - b, ys - b
            obj_span = xo - b, yo - b
            spans = (subj_span, obj_span)
            d_sent_text = self.decorate_sent(idx,
                                             spans,
                                             sent_text)
            demo_sf_text += d_sent_text
            # sf_infobox_text = self.build_sf_infobox(idx, sent)
            pointer = e
            idx += 1
        demo_sf_text += self.raw[pointer:]
        # NOTE(review): excerpt truncated here; a `return demo_sf_text`
        # presumably follows in the original file.
test_object_marker.py
Source:test_object_marker.py
# (tail of a mark factory: a class `Blap` is defined on earlier, elided lines;
#  indentation below is assumed -- confirm against the original file)
        def func(self):
            pass
    return Blap.func


@pytest.fixture(params=mark_factories)
def marked_obj(request, mark_name, mark_value):
    """One marked object per factory in ``mark_factories``."""
    returned = request.param(mark_name, mark_value)
    return returned


@pytest.fixture(params=['mark_name'])
def mark_name(request):
    """Name under which the mark is applied."""
    return request.param


@pytest.fixture(params=['mark_value', 1, True, 1.0])
def mark_value(request):
    # NOTE(review): body elided in this excerpt (presumably `return request.param`).
    ...
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 automation test minutes FREE!