Best Python code snippet using the TensorFlow Object Detection API
feature_map_generators_test.py
Source:feature_map_generators_test.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Tests for feature map generators."""

from absl.testing import parameterized

import numpy as np
import tensorflow as tf

from google.protobuf import text_format

from object_detection.builders import hyperparams_builder
from object_detection.models import feature_map_generators
from object_detection.protos import hyperparams_pb2

# Each layout maps base-network endpoints ('from_layer') and extra-layer specs
# to the multi-resolution feature map generator. -1 entries mean "use the
# endpoint's own value" for that slot.
INCEPTION_V2_LAYOUT = {
    'from_layer': ['Mixed_3c', 'Mixed_4c', 'Mixed_5c', '', '', ''],
    'layer_depth': [-1, -1, -1, 512, 256, 256],
    'anchor_strides': [16, 32, 64, -1, -1, -1],
    'layer_target_norm': [20.0, -1, -1, -1, -1, -1],
}

INCEPTION_V3_LAYOUT = {
    'from_layer': ['Mixed_5d', 'Mixed_6e', 'Mixed_7c', '', '', ''],
    'layer_depth': [-1, -1, -1, 512, 256, 128],
    'anchor_strides': [16, 32, 64, -1, -1, -1],
    'aspect_ratios': [1.0, 2.0, 1.0/2, 3.0, 1.0/3]
}

EMBEDDED_SSD_MOBILENET_V1_LAYOUT = {
    'from_layer': ['Conv2d_11_pointwise', 'Conv2d_13_pointwise', '', '', ''],
    'layer_depth': [-1, -1, 512, 256, 256],
    'conv_kernel_size': [-1, -1, 3, 3, 2],
}

SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT = {
    'from_layer': ['Conv2d_13_pointwise', '', '', ''],
    'layer_depth': [-1, 256, 256, 256],
}


@parameterized.parameters(
    {'use_keras': False},
    {'use_keras': True},
)
class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
  """Tests for the multi-resolution (SSD-style) feature map generators."""

  def _build_conv_hyperparams(self):
    """Returns KerasLayerHyperparams built from a minimal text proto."""
    conv_hyperparams = hyperparams_pb2.Hyperparams()
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
    return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)

  def _build_feature_map_generator(self, feature_map_layout, use_keras,
                                   pool_residual=False):
    """Returns a callable mapping image_features to feature maps.

    Args:
      feature_map_layout: one of the *_LAYOUT dicts above.
      use_keras: whether to build the Keras generator or the slim function.
      pool_residual: only honored by the slim (non-Keras) generator.
    """
    if use_keras:
      return feature_map_generators.KerasMultiResolutionFeatureMaps(
          feature_map_layout=feature_map_layout,
          depth_multiplier=1,
          min_depth=32,
          insert_1x1_conv=True,
          freeze_batchnorm=False,
          is_training=True,
          conv_hyperparams=self._build_conv_hyperparams(),
          name='FeatureMaps'
      )
    else:
      def feature_map_generator(image_features):
        return feature_map_generators.multi_resolution_feature_maps(
            feature_map_layout=feature_map_layout,
            depth_multiplier=1,
            min_depth=32,
            insert_1x1_conv=True,
            image_features=image_features,
            pool_residual=pool_residual)
      return feature_map_generator

  def test_get_expected_feature_map_shapes_with_inception_v2(self, use_keras):
    image_features = {
        'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
        'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
        'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
    }
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=INCEPTION_V2_LAYOUT,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'Mixed_3c': (4, 28, 28, 256),
        'Mixed_4c': (4, 14, 14, 576),
        'Mixed_5c': (4, 7, 7, 1024),
        'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
        'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
        'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)}

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = dict(
          (key, value.shape) for key, value in out_feature_maps.items())
      self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)

  def test_get_expected_feature_map_shapes_with_inception_v2_use_depthwise(
      self, use_keras):
    image_features = {
        'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
        'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
        'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
    }
    # Copy the layout so the module-level constant is not mutated.
    layout_copy = INCEPTION_V2_LAYOUT.copy()
    layout_copy['use_depthwise'] = True
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=layout_copy,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'Mixed_3c': (4, 28, 28, 256),
        'Mixed_4c': (4, 14, 14, 576),
        'Mixed_5c': (4, 7, 7, 1024),
        'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
        'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
        'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)}

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = dict(
          (key, value.shape) for key, value in out_feature_maps.items())
      self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)

  def test_get_expected_feature_map_shapes_use_explicit_padding(
      self, use_keras):
    image_features = {
        'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
        'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
        'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
    }
    layout_copy = INCEPTION_V2_LAYOUT.copy()
    layout_copy['use_explicit_padding'] = True
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=layout_copy,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'Mixed_3c': (4, 28, 28, 256),
        'Mixed_4c': (4, 14, 14, 576),
        'Mixed_5c': (4, 7, 7, 1024),
        'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
        'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
        'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)}

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = dict(
          (key, value.shape) for key, value in out_feature_maps.items())
      self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)

  def test_get_expected_feature_map_shapes_with_inception_v3(self, use_keras):
    image_features = {
        'Mixed_5d': tf.random_uniform([4, 35, 35, 256], dtype=tf.float32),
        'Mixed_6e': tf.random_uniform([4, 17, 17, 576], dtype=tf.float32),
        'Mixed_7c': tf.random_uniform([4, 8, 8, 1024], dtype=tf.float32)
    }
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=INCEPTION_V3_LAYOUT,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'Mixed_5d': (4, 35, 35, 256),
        'Mixed_6e': (4, 17, 17, 576),
        'Mixed_7c': (4, 8, 8, 1024),
        'Mixed_7c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
        'Mixed_7c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
        'Mixed_7c_2_Conv2d_5_3x3_s2_128': (4, 1, 1, 128)}

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = dict(
          (key, value.shape) for key, value in out_feature_maps.items())
      self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)

  def test_get_expected_feature_map_shapes_with_embedded_ssd_mobilenet_v1(
      self, use_keras):
    image_features = {
        'Conv2d_11_pointwise': tf.random_uniform([4, 16, 16, 512],
                                                 dtype=tf.float32),
        'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024],
                                                 dtype=tf.float32),
    }
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=EMBEDDED_SSD_MOBILENET_V1_LAYOUT,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'Conv2d_11_pointwise': (4, 16, 16, 512),
        'Conv2d_13_pointwise': (4, 8, 8, 1024),
        'Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512': (4, 4, 4, 512),
        'Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256': (4, 2, 2, 256),
        'Conv2d_13_pointwise_2_Conv2d_4_2x2_s2_256': (4, 1, 1, 256)}

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = dict(
          (key, value.shape) for key, value in out_feature_maps.items())
      self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)

  def test_feature_map_shapes_with_pool_residual_ssd_mobilenet_v1(
      self, use_keras):
    image_features = {
        'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024],
                                                 dtype=tf.float32),
    }
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT,
        use_keras=use_keras,
        pool_residual=True
    )
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'Conv2d_13_pointwise': (4, 8, 8, 1024),
        'Conv2d_13_pointwise_2_Conv2d_1_3x3_s2_256': (4, 4, 4, 256),
        'Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_256': (4, 2, 2, 256),
        'Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256': (4, 1, 1, 256)}

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = dict(
          (key, value.shape) for key, value in out_feature_maps.items())
      self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)

  def test_get_expected_variable_names_with_inception_v2(self, use_keras):
    image_features = {
        'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
        'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
        'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
    }
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=INCEPTION_V2_LAYOUT,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_slim_variables = set([
        'Mixed_5c_1_Conv2d_3_1x1_256/weights',
        'Mixed_5c_1_Conv2d_3_1x1_256/biases',
        'Mixed_5c_2_Conv2d_3_3x3_s2_512/weights',
        'Mixed_5c_2_Conv2d_3_3x3_s2_512/biases',
        'Mixed_5c_1_Conv2d_4_1x1_128/weights',
        'Mixed_5c_1_Conv2d_4_1x1_128/biases',
        'Mixed_5c_2_Conv2d_4_3x3_s2_256/weights',
        'Mixed_5c_2_Conv2d_4_3x3_s2_256/biases',
        'Mixed_5c_1_Conv2d_5_1x1_128/weights',
        'Mixed_5c_1_Conv2d_5_1x1_128/biases',
        'Mixed_5c_2_Conv2d_5_3x3_s2_256/weights',
        'Mixed_5c_2_Conv2d_5_3x3_s2_256/biases',
    ])

    expected_keras_variables = set([
        'FeatureMaps/Mixed_5c_1_Conv2d_3_1x1_256_conv/kernel',
        'FeatureMaps/Mixed_5c_1_Conv2d_3_1x1_256_conv/bias',
        'FeatureMaps/Mixed_5c_2_Conv2d_3_3x3_s2_512_conv/kernel',
        'FeatureMaps/Mixed_5c_2_Conv2d_3_3x3_s2_512_conv/bias',
        'FeatureMaps/Mixed_5c_1_Conv2d_4_1x1_128_conv/kernel',
        'FeatureMaps/Mixed_5c_1_Conv2d_4_1x1_128_conv/bias',
        'FeatureMaps/Mixed_5c_2_Conv2d_4_3x3_s2_256_conv/kernel',
        'FeatureMaps/Mixed_5c_2_Conv2d_4_3x3_s2_256_conv/bias',
        'FeatureMaps/Mixed_5c_1_Conv2d_5_1x1_128_conv/kernel',
        'FeatureMaps/Mixed_5c_1_Conv2d_5_1x1_128_conv/bias',
        'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/kernel',
        'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/bias',
    ])

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      sess.run(feature_maps)
      actual_variable_set = set(
          [var.op.name for var in tf.trainable_variables()])
      if use_keras:
        self.assertSetEqual(expected_keras_variables, actual_variable_set)
      else:
        self.assertSetEqual(expected_slim_variables, actual_variable_set)

  def test_get_expected_variable_names_with_inception_v2_use_depthwise(
      self,
      use_keras):
    image_features = {
        'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
        'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
        'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
    }
    layout_copy = INCEPTION_V2_LAYOUT.copy()
    layout_copy['use_depthwise'] = True
    feature_map_generator = self._build_feature_map_generator(
        feature_map_layout=layout_copy,
        use_keras=use_keras
    )
    feature_maps = feature_map_generator(image_features)

    expected_slim_variables = set([
        'Mixed_5c_1_Conv2d_3_1x1_256/weights',
        'Mixed_5c_1_Conv2d_3_1x1_256/biases',
        'Mixed_5c_2_Conv2d_3_3x3_s2_512_depthwise/depthwise_weights',
        'Mixed_5c_2_Conv2d_3_3x3_s2_512_depthwise/biases',
        'Mixed_5c_2_Conv2d_3_3x3_s2_512/weights',
        'Mixed_5c_2_Conv2d_3_3x3_s2_512/biases',
        'Mixed_5c_1_Conv2d_4_1x1_128/weights',
        'Mixed_5c_1_Conv2d_4_1x1_128/biases',
        'Mixed_5c_2_Conv2d_4_3x3_s2_256_depthwise/depthwise_weights',
        'Mixed_5c_2_Conv2d_4_3x3_s2_256_depthwise/biases',
        'Mixed_5c_2_Conv2d_4_3x3_s2_256/weights',
        'Mixed_5c_2_Conv2d_4_3x3_s2_256/biases',
        'Mixed_5c_1_Conv2d_5_1x1_128/weights',
        'Mixed_5c_1_Conv2d_5_1x1_128/biases',
        'Mixed_5c_2_Conv2d_5_3x3_s2_256_depthwise/depthwise_weights',
        'Mixed_5c_2_Conv2d_5_3x3_s2_256_depthwise/biases',
        'Mixed_5c_2_Conv2d_5_3x3_s2_256/weights',
        'Mixed_5c_2_Conv2d_5_3x3_s2_256/biases',
    ])

    expected_keras_variables = set([
        'FeatureMaps/Mixed_5c_1_Conv2d_3_1x1_256_conv/kernel',
        'FeatureMaps/Mixed_5c_1_Conv2d_3_1x1_256_conv/bias',
        ('FeatureMaps/Mixed_5c_2_Conv2d_3_3x3_s2_512_depthwise_conv/'
         'depthwise_kernel'),
        ('FeatureMaps/Mixed_5c_2_Conv2d_3_3x3_s2_512_depthwise_conv/'
         'bias'),
        'FeatureMaps/Mixed_5c_2_Conv2d_3_3x3_s2_512_conv/kernel',
        'FeatureMaps/Mixed_5c_2_Conv2d_3_3x3_s2_512_conv/bias',
        'FeatureMaps/Mixed_5c_1_Conv2d_4_1x1_128_conv/kernel',
        'FeatureMaps/Mixed_5c_1_Conv2d_4_1x1_128_conv/bias',
        ('FeatureMaps/Mixed_5c_2_Conv2d_4_3x3_s2_256_depthwise_conv/'
         'depthwise_kernel'),
        ('FeatureMaps/Mixed_5c_2_Conv2d_4_3x3_s2_256_depthwise_conv/'
         'bias'),
        'FeatureMaps/Mixed_5c_2_Conv2d_4_3x3_s2_256_conv/kernel',
        'FeatureMaps/Mixed_5c_2_Conv2d_4_3x3_s2_256_conv/bias',
        'FeatureMaps/Mixed_5c_1_Conv2d_5_1x1_128_conv/kernel',
        'FeatureMaps/Mixed_5c_1_Conv2d_5_1x1_128_conv/bias',
        ('FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_depthwise_conv/'
         'depthwise_kernel'),
        ('FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_depthwise_conv/'
         'bias'),
        'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/kernel',
        'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/bias',
    ])

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      sess.run(feature_maps)
      actual_variable_set = set(
          [var.op.name for var in tf.trainable_variables()])
      if use_keras:
        self.assertSetEqual(expected_keras_variables, actual_variable_set)
      else:
        self.assertSetEqual(expected_slim_variables, actual_variable_set)


@parameterized.parameters({'use_native_resize_op': True, 'use_keras': False},
                          {'use_native_resize_op': False, 'use_keras': False},
                          {'use_native_resize_op': True, 'use_keras': True},
                          {'use_native_resize_op': False, 'use_keras': True})
class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
  """Tests for the FPN top-down feature map generators."""

  def _build_conv_hyperparams(self):
    """Returns KerasLayerHyperparams built from a minimal text proto."""
    conv_hyperparams = hyperparams_pb2.Hyperparams()
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
    return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)

  def _build_feature_map_generator(
      self, image_features, depth, use_keras, use_bounded_activations=False,
      use_native_resize_op=False, use_explicit_padding=False,
      use_depthwise=False):
    """Returns a callable producing FPN top-down feature maps."""
    if use_keras:
      return feature_map_generators.KerasFpnTopDownFeatureMaps(
          num_levels=len(image_features),
          depth=depth,
          is_training=True,
          conv_hyperparams=self._build_conv_hyperparams(),
          freeze_batchnorm=False,
          use_depthwise=use_depthwise,
          use_explicit_padding=use_explicit_padding,
          use_bounded_activations=use_bounded_activations,
          use_native_resize_op=use_native_resize_op,
          scope=None,
          name='FeatureMaps',
      )
    else:
      def feature_map_generator(image_features):
        return feature_map_generators.fpn_top_down_feature_maps(
            image_features=image_features,
            depth=depth,
            use_depthwise=use_depthwise,
            use_explicit_padding=use_explicit_padding,
            use_bounded_activations=use_bounded_activations,
            use_native_resize_op=use_native_resize_op)
      return feature_map_generator

  def test_get_expected_feature_map_shapes(
      self, use_native_resize_op, use_keras):
    image_features = [
        ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
        ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
        ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
        ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
    ]
    feature_map_generator = self._build_feature_map_generator(
        image_features=image_features,
        depth=128,
        use_keras=use_keras,
        use_native_resize_op=use_native_resize_op)
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'top_down_block2': (4, 8, 8, 128),
        'top_down_block3': (4, 4, 4, 128),
        'top_down_block4': (4, 2, 2, 128),
        'top_down_block5': (4, 1, 1, 128)
    }

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = {key: value.shape
                                for key, value in out_feature_maps.items()}
      self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes)

  def test_get_expected_feature_map_shapes_with_explicit_padding(
      self, use_native_resize_op, use_keras):
    image_features = [
        ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
        ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
        ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
        ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
    ]
    feature_map_generator = self._build_feature_map_generator(
        image_features=image_features,
        depth=128,
        use_keras=use_keras,
        use_explicit_padding=True,
        use_native_resize_op=use_native_resize_op)
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'top_down_block2': (4, 8, 8, 128),
        'top_down_block3': (4, 4, 4, 128),
        'top_down_block4': (4, 2, 2, 128),
        'top_down_block5': (4, 1, 1, 128)
    }

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = {key: value.shape
                                for key, value in out_feature_maps.items()}
      self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes)

  def test_use_bounded_activations_add_operations(
      self, use_native_resize_op, use_keras):
    tf_graph = tf.Graph()
    with tf_graph.as_default():
      image_features = [('block2',
                         tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
                        ('block3',
                         tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
                        ('block4',
                         tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
                        ('block5',
                         tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))]
      feature_map_generator = self._build_feature_map_generator(
          image_features=image_features,
          depth=128,
          use_keras=use_keras,
          use_bounded_activations=True,
          use_native_resize_op=use_native_resize_op)
      feature_map_generator(image_features)

      if use_keras:
        expected_added_operations = dict.fromkeys([
            'FeatureMaps/top_down/clip_by_value/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_1/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_2/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_3/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_4/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_5/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_6/clip_by_value',
        ])
      else:
        expected_added_operations = dict.fromkeys([
            'top_down/clip_by_value', 'top_down/clip_by_value_1',
            'top_down/clip_by_value_2', 'top_down/clip_by_value_3',
            'top_down/clip_by_value_4', 'top_down/clip_by_value_5',
            'top_down/clip_by_value_6'
        ])
      op_names = {op.name: None for op in tf_graph.get_operations()}
      self.assertDictContainsSubset(expected_added_operations, op_names)

  def test_use_bounded_activations_clip_value(
      self, use_native_resize_op, use_keras):
    tf_graph = tf.Graph()
    with tf_graph.as_default():
      image_features = [
          ('block2', 255 * tf.ones([4, 8, 8, 256], dtype=tf.float32)),
          ('block3', 255 * tf.ones([4, 4, 4, 256], dtype=tf.float32)),
          ('block4', 255 * tf.ones([4, 2, 2, 256], dtype=tf.float32)),
          ('block5', 255 * tf.ones([4, 1, 1, 256], dtype=tf.float32))
      ]
      feature_map_generator = self._build_feature_map_generator(
          image_features=image_features,
          depth=128,
          use_keras=use_keras,
          use_bounded_activations=True,
          use_native_resize_op=use_native_resize_op)
      feature_map_generator(image_features)

      if use_keras:
        expected_clip_by_value_ops = dict.fromkeys([
            'FeatureMaps/top_down/clip_by_value/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_1/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_2/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_3/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_4/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_5/clip_by_value',
            'FeatureMaps/top_down/clip_by_value_6/clip_by_value',
        ])
      else:
        expected_clip_by_value_ops = [
            'top_down/clip_by_value', 'top_down/clip_by_value_1',
            'top_down/clip_by_value_2', 'top_down/clip_by_value_3',
            'top_down/clip_by_value_4', 'top_down/clip_by_value_5',
            'top_down/clip_by_value_6'
        ]

      # Gathers activation tensors before and after clip_by_value operations.
      activations = {}
      for clip_by_value_op in expected_clip_by_value_ops:
        clip_input_tensor = tf_graph.get_operation_by_name(
            '{}/Minimum'.format(clip_by_value_op)).inputs[0]
        clip_output_tensor = tf_graph.get_tensor_by_name(
            '{}:0'.format(clip_by_value_op))
        activations.update({
            'before_{}'.format(clip_by_value_op): clip_input_tensor,
            'after_{}'.format(clip_by_value_op): clip_output_tensor,
        })

      expected_lower_bound = -feature_map_generators.ACTIVATION_BOUND
      expected_upper_bound = feature_map_generators.ACTIVATION_BOUND
      init_op = tf.global_variables_initializer()
      with self.test_session() as session:
        session.run(init_op)
        activations_output = session.run(activations)
        for clip_by_value_op in expected_clip_by_value_ops:
          # Before clipping, activations are beyond the expected bound because
          # of large input image_features values.
          activations_before_clipping = (
              activations_output['before_{}'.format(clip_by_value_op)])
          before_clipping_lower_bound = np.amin(activations_before_clipping)
          before_clipping_upper_bound = np.amax(activations_before_clipping)
          self.assertLessEqual(before_clipping_lower_bound,
                               expected_lower_bound)
          self.assertGreaterEqual(before_clipping_upper_bound,
                                  expected_upper_bound)

          # After clipping, activations are bounded as expectation.
          activations_after_clipping = (
              activations_output['after_{}'.format(clip_by_value_op)])
          after_clipping_lower_bound = np.amin(activations_after_clipping)
          after_clipping_upper_bound = np.amax(activations_after_clipping)
          self.assertGreaterEqual(after_clipping_lower_bound,
                                  expected_lower_bound)
          self.assertLessEqual(after_clipping_upper_bound,
                               expected_upper_bound)

  def test_get_expected_feature_map_shapes_with_depthwise(
      self, use_native_resize_op, use_keras):
    image_features = [
        ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
        ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
        ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
        ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
    ]
    feature_map_generator = self._build_feature_map_generator(
        image_features=image_features,
        depth=128,
        use_keras=use_keras,
        use_depthwise=True,
        use_native_resize_op=use_native_resize_op)
    feature_maps = feature_map_generator(image_features)

    expected_feature_map_shapes = {
        'top_down_block2': (4, 8, 8, 128),
        'top_down_block3': (4, 4, 4, 128),
        'top_down_block4': (4, 2, 2, 128),
        'top_down_block5': (4, 1, 1, 128)
    }

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = {key: value.shape
                                for key, value in out_feature_maps.items()}
      self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes)

  def test_get_expected_variable_names(
      self, use_native_resize_op, use_keras):
    image_features = [
        ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
        ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
        ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
        ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
    ]
    feature_map_generator = self._build_feature_map_generator(
        image_features=image_features,
        depth=128,
        use_keras=use_keras,
        use_native_resize_op=use_native_resize_op)
    feature_maps = feature_map_generator(image_features)

    expected_slim_variables = set([
        'projection_1/weights',
        'projection_1/biases',
        'projection_2/weights',
        'projection_2/biases',
        'projection_3/weights',
        'projection_3/biases',
        'projection_4/weights',
        'projection_4/biases',
        'smoothing_1/weights',
        'smoothing_1/biases',
        'smoothing_2/weights',
        'smoothing_2/biases',
        'smoothing_3/weights',
        'smoothing_3/biases',
    ])

    expected_keras_variables = set([
        'FeatureMaps/top_down/projection_1/kernel',
        'FeatureMaps/top_down/projection_1/bias',
        'FeatureMaps/top_down/projection_2/kernel',
        'FeatureMaps/top_down/projection_2/bias',
        'FeatureMaps/top_down/projection_3/kernel',
        'FeatureMaps/top_down/projection_3/bias',
        'FeatureMaps/top_down/projection_4/kernel',
        'FeatureMaps/top_down/projection_4/bias',
        'FeatureMaps/top_down/smoothing_1_conv/kernel',
        'FeatureMaps/top_down/smoothing_1_conv/bias',
        'FeatureMaps/top_down/smoothing_2_conv/kernel',
        'FeatureMaps/top_down/smoothing_2_conv/bias',
        'FeatureMaps/top_down/smoothing_3_conv/kernel',
        'FeatureMaps/top_down/smoothing_3_conv/bias'
    ])

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      sess.run(feature_maps)
      actual_variable_set = set(
          [var.op.name for var in tf.trainable_variables()])
      if use_keras:
        self.assertSetEqual(expected_keras_variables, actual_variable_set)
      else:
        self.assertSetEqual(expected_slim_variables, actual_variable_set)

  def test_get_expected_variable_names_with_depthwise(
      self, use_native_resize_op, use_keras):
    image_features = [
        ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
        ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
        ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
        ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
    ]
    feature_map_generator = self._build_feature_map_generator(
        image_features=image_features,
        depth=128,
        use_keras=use_keras,
        use_depthwise=True,
        use_native_resize_op=use_native_resize_op)
    feature_maps = feature_map_generator(image_features)

    expected_slim_variables = set([
        'projection_1/weights',
        'projection_1/biases',
        'projection_2/weights',
        'projection_2/biases',
        'projection_3/weights',
        'projection_3/biases',
        'projection_4/weights',
        'projection_4/biases',
        'smoothing_1/depthwise_weights',
        'smoothing_1/pointwise_weights',
        'smoothing_1/biases',
        'smoothing_2/depthwise_weights',
        'smoothing_2/pointwise_weights',
        'smoothing_2/biases',
        'smoothing_3/depthwise_weights',
        'smoothing_3/pointwise_weights',
        'smoothing_3/biases',
    ])

    expected_keras_variables = set([
        'FeatureMaps/top_down/projection_1/kernel',
        'FeatureMaps/top_down/projection_1/bias',
        'FeatureMaps/top_down/projection_2/kernel',
        'FeatureMaps/top_down/projection_2/bias',
        'FeatureMaps/top_down/projection_3/kernel',
        'FeatureMaps/top_down/projection_3/bias',
        'FeatureMaps/top_down/projection_4/kernel',
        'FeatureMaps/top_down/projection_4/bias',
        'FeatureMaps/top_down/smoothing_1_depthwise_conv/depthwise_kernel',
        'FeatureMaps/top_down/smoothing_1_depthwise_conv/pointwise_kernel',
        'FeatureMaps/top_down/smoothing_1_depthwise_conv/bias',
        'FeatureMaps/top_down/smoothing_2_depthwise_conv/depthwise_kernel',
        'FeatureMaps/top_down/smoothing_2_depthwise_conv/pointwise_kernel',
        'FeatureMaps/top_down/smoothing_2_depthwise_conv/bias',
        'FeatureMaps/top_down/smoothing_3_depthwise_conv/depthwise_kernel',
        'FeatureMaps/top_down/smoothing_3_depthwise_conv/pointwise_kernel',
        'FeatureMaps/top_down/smoothing_3_depthwise_conv/bias'
    ])

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      sess.run(feature_maps)
      actual_variable_set = set(
          [var.op.name for var in tf.trainable_variables()])
      if use_keras:
        self.assertSetEqual(expected_keras_variables, actual_variable_set)
      else:
        self.assertSetEqual(expected_slim_variables, actual_variable_set)


class GetDepthFunctionTest(tf.test.TestCase):
  """Tests for feature_map_generators.get_depth_fn."""

  def test_return_min_depth_when_multiplier_is_small(self):
    depth_fn = feature_map_generators.get_depth_fn(depth_multiplier=0.5,
                                                   min_depth=16)
    self.assertEqual(depth_fn(16), 16)

  def test_return_correct_depth_with_multiplier(self):
    depth_fn = feature_map_generators.get_depth_fn(depth_multiplier=0.5,
                                                   min_depth=16)
    self.assertEqual(depth_fn(64), 32)


@parameterized.parameters(
    {'replace_pool_with_conv': False},
    {'replace_pool_with_conv': True},
)
class PoolingPyramidFeatureMapGeneratorTest(tf.test.TestCase):
  """Tests for the pooling pyramid feature map generator."""

  def test_get_expected_feature_map_shapes(self, replace_pool_with_conv):
    image_features = {
        'image_features': tf.random_uniform([4, 19, 19, 1024])
    }
    feature_maps = feature_map_generators.pooling_pyramid_feature_maps(
        base_feature_map_depth=1024,
        num_layers=6,
        image_features=image_features,
        replace_pool_with_conv=replace_pool_with_conv)

    expected_pool_feature_map_shapes = {
        'Base_Conv2d_1x1_1024': (4, 19, 19, 1024),
        'MaxPool2d_0_2x2': (4, 10, 10, 1024),
        'MaxPool2d_1_2x2': (4, 5, 5, 1024),
        'MaxPool2d_2_2x2': (4, 3, 3, 1024),
        'MaxPool2d_3_2x2': (4, 2, 2, 1024),
        'MaxPool2d_4_2x2': (4, 1, 1, 1024),
    }

    expected_conv_feature_map_shapes = {
        'Base_Conv2d_1x1_1024': (4, 19, 19, 1024),
        'Conv2d_0_3x3_s2_1024': (4, 10, 10, 1024),
        'Conv2d_1_3x3_s2_1024': (4, 5, 5, 1024),
        'Conv2d_2_3x3_s2_1024': (4, 3, 3, 1024),
        'Conv2d_3_3x3_s2_1024': (4, 2, 2, 1024),
        'Conv2d_4_3x3_s2_1024': (4, 1, 1, 1024),
    }

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      out_feature_maps = sess.run(feature_maps)
      out_feature_map_shapes = {key: value.shape
                                for key, value in out_feature_maps.items()}
      if replace_pool_with_conv:
        self.assertDictEqual(expected_conv_feature_map_shapes,
                             out_feature_map_shapes)
      else:
        self.assertDictEqual(expected_pool_feature_map_shapes,
                             out_feature_map_shapes)

  def test_get_expected_variable_names(self, replace_pool_with_conv):
    image_features = {
        'image_features': tf.random_uniform([4, 19, 19, 1024])
    }
    feature_maps = feature_map_generators.pooling_pyramid_feature_maps(
        base_feature_map_depth=1024,
        num_layers=6,
        image_features=image_features,
        replace_pool_with_conv=replace_pool_with_conv)

    # Max-pool layers create no trainable variables, so only the base conv
    # appears in the pooling variant.
    expected_pool_variables = set([
        'Base_Conv2d_1x1_1024/weights',
        'Base_Conv2d_1x1_1024/biases',
    ])

    expected_conv_variables = set([
        'Base_Conv2d_1x1_1024/weights',
        'Base_Conv2d_1x1_1024/biases',
        'Conv2d_0_3x3_s2_1024/weights',
        'Conv2d_0_3x3_s2_1024/biases',
        'Conv2d_1_3x3_s2_1024/weights',
        'Conv2d_1_3x3_s2_1024/biases',
        'Conv2d_2_3x3_s2_1024/weights',
        'Conv2d_2_3x3_s2_1024/biases',
        'Conv2d_3_3x3_s2_1024/weights',
        'Conv2d_3_3x3_s2_1024/biases',
        'Conv2d_4_3x3_s2_1024/weights',
        'Conv2d_4_3x3_s2_1024/biases',
    ])

    init_op = tf.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      sess.run(feature_maps)
      actual_variable_set = set(
          [var.op.name for var in tf.trainable_variables()])
      if replace_pool_with_conv:
        self.assertSetEqual(expected_conv_variables, actual_variable_set)
      else:
        self.assertSetEqual(expected_pool_variables, actual_variable_set)


if __name__ == '__main__':
  tf.test.main()
pif_calculator.py
Source:pif_calculator.py
"""Cell- and row-level information-gain computations for PIF risk analysis."""
import collections
import itertools
import math
import operator
import random

import numpy as np
import pandas as pd

from piflib.data_util import calculate_distribution, complete_feature_priors
from piflib.entropy import create_conditional_entropy_table


def compute_cigs(dataframe,
                 feature_priors=None,
                 feature_accuracies=None,
                 samples=None):
    """Compute the cell information gain (CIG) for all cells in the dataset.

    Find the risk (as KL divergence from prior) for all attributes.

    :param dataframe: a Pandas DataFrame object containing tabular data
    :param feature_priors: optional dictionary mapping a feature index to an
        assumed prior. If not provided, the prior for the feature is
        calculated from the global distribution.
    :param feature_accuracies: maps the feature index to the accuracy of the
        feature. If not provided for a feature, it defaults to 1.
    :param samples: optional number of feature combinations to sample; if
        None, every combination is evaluated.
    :return: a Pandas DataFrame containing the CIG values, at the same index
        as their corresponding cell values in the input dataframe.
    """
    # Avoid mutable default arguments (shared across calls).
    feature_priors = {} if feature_priors is None else feature_priors
    feature_accuracies = {} if feature_accuracies is None else feature_accuracies

    unknown_features = 1
    dataset = dataframe.values
    num_features = len(dataset[0])
    assert all(len(row) == num_features for row in dataset)
    feature_priors = complete_feature_priors(dataframe, feature_priors)
    feature_accuracies = feature_accuracies.copy()
    for i in range(num_features):
        feature_accuracies.setdefault(i, 1)

    feature_counts = [0] * num_features
    feature_kls = [[0] * len(dataset) for _ in range(num_features)]
    for is_ in sample_is(num_features, unknown_features, samples):
        feature_kls_this = find_kls_for_features(
            dataset,
            is_,
            feature_priors,
            feature_accuracies)
        for i, feature_kl in zip(is_, feature_kls_this):
            # Accumulate per-cell KL contributions for feature i.
            feature_kls[i] = tuple(map(
                operator.add, feature_kl, feature_kls[i]))
        for i in is_:
            feature_counts[i] += 1
    for i, denom in enumerate(feature_counts):
        if denom == 0:
            # Feature never sampled (possible when `samples` is small);
            # leave its contributions at the initial zeros instead of
            # raising ZeroDivisionError.
            continue
        feature_kls[i] = tuple(kl / denom for kl in feature_kls[i])
    return pd.DataFrame(list(zip(*feature_kls)), columns=dataframe.columns)


def compute_weighted_cigs(dataframe, feature_priors=None, feature_accuracies=None):
    """Compute the Weighted Cell Information Gain (wCIG) for all cells.

    Each feature's CIG column is scaled by H(X|Y)/H(X) — how much of the
    feature's entropy remains once the other features are known.

    :param dataframe: a Pandas DataFrame object containing tabular data
    :param feature_priors: optional dictionary mapping a feature index to an
        assumed prior (see :func:`compute_cigs`).
    :param feature_accuracies: maps the feature index to the accuracy of the
        feature. If not provided for a feature, it defaults to 1.
    :return: a Pandas DataFrame containing the wCIG values, at the same
        index as their corresponding cell values in the input dataframe.
    """
    cigs = compute_cigs(dataframe, feature_priors=feature_priors,
                        feature_accuracies=feature_accuracies)
    cond_entropy = create_conditional_entropy_table(dataframe)
    weights = cond_entropy['H(X|Y)'].values / cond_entropy['H(X)']
    weights = np.nan_to_num(weights)  # H(X) == 0 yields NaN -> weight 0
    return (cigs * weights).round(2)


def compute_csfs(df, feature_priors=None, feature_accuracies=None):
    """Compute the Cell Surprise Factor (CSF) for all cells in the dataset.

    The CSF is defined as the change in probability for a cell value between
    the prior and the posterior distribution.

    :param df: a Pandas DataFrame object containing tabular data
    :param feature_priors: optional dictionary mapping a feature index to an
        assumed prior (see :func:`compute_cigs`).
    :param feature_accuracies: maps the feature index to the accuracy of the
        feature. If not provided for a feature, it defaults to 1.
    :return: a Pandas DataFrame containing the CSF values, at the same index
        as their corresponding cell values in the input dataframe.
    """
    feature_priors = {} if feature_priors is None else feature_priors
    feature_accuracies = {} if feature_accuracies is None else feature_accuracies

    dataset = df.values
    num_features = len(dataset[0])
    # Complete priors from the global distribution where not supplied.
    feature_priors = complete_feature_priors(df, feature_priors)
    feature_accuracies = feature_accuracies.copy()
    for i in range(num_features):
        feature_accuracies.setdefault(i, 1)
    feature_csfs = [[0] * len(dataset) for _ in range(num_features)]
    for is_ in sample_is(num_features, 1, None):
        feature_csfs[is_[0]] = apply_to_posterior_and_prior(
            dataset,
            is_,
            feature_priors,
            feature_accuracies,
            calculate_prob_change)
    return pd.DataFrame(list(zip(*feature_csfs)), columns=df.columns)


def compute_pif(cigs, percentile):
    """Compute the PIF.

    The PIF is defined as the n-th percentile of the individual RIG values,
    i.e. the RIG of n percent of the entities in the dataset does not exceed
    the PIF value.

    RIG stands for row information gain: the overall information gain for an
    entity, computed by summing the CIG values of that entity's row.

    The percentile value can be chosen between 0 and 100. 100 returns the
    maximum RIG value. RIG values often form a long-tail distribution with
    few high-value outliers; a percentile below 100 ignores (some of) the
    highest values. If ignoring the risk of some entities fits within your
    risk framework, a percentile below 100 makes the PIF less susceptible to
    RIG outliers.

    :param cigs: the CIG values of the dataset (see :func:`compute_cigs`)
    :param percentile: which percentile of RIG values to include in the PIF
    :returns: the PIF_percentile value of the given CIGs
    """
    rigs = cigs.sum(axis=1)
    return np.percentile(rigs, percentile)


def compute_posterior_distributions(feature, df):
    """Return the posterior distributions of `feature` given all other columns.

    Rows are bucketed by the tuple of their other-column values; within each
    bucket the empirical distribution of `feature` is computed.

    :returns: (dists, feature_vals) where `dists` maps the stringified bucket
        key to a list of probabilities aligned with `feature_vals`.
    """
    known_features = tuple(col_name for col_name in df.columns
                           if col_name != feature)
    bucket = collections.defaultdict(list)
    bucket_map = []
    for _, row in df.iterrows():
        key = tuple(row[known_feature] for known_feature in known_features)
        bucket[key].append(row[feature])
        bucket_map.append(key)
    bucket_distributions = {key: calculate_distribution(el_bucket)
                            for key, el_bucket in bucket.items()}
    feature_vals = df[feature].unique()
    dists = {}
    for key, distribution in bucket_distributions.items():
        # Missing values in a bucket have posterior probability 0.
        dists[str(key)] = [distribution.get(feature_val, 0)
                           for feature_val in feature_vals]
    return dists, feature_vals


def binom(n, r):
    """Return the binomial coefficient: n choose r."""
    return math.factorial(n) // math.factorial(n - r) // math.factorial(r)


def sample_is(n, r, samples):
    """Yield size-`r` index combinations drawn from range(n).

    With ``samples=None`` every combination is yielded. Otherwise exactly
    `samples` distinct combinations are yielded, preferring combinations
    that cover not-yet-seen feature indices so every feature is exercised
    as early as possible.

    :raises ValueError: if `samples` exceeds the number of combinations.
    """
    if samples is None:
        yield from itertools.combinations(range(n), r)
        return
    total_combinations = binom(n, r)
    if samples > total_combinations:
        raise ValueError('more samples than combinations')
    if samples >= total_combinations >> 1:
        # Dense sampling: shuffle the full list; the first pass prefers
        # combinations covering unseen features, the second pass tops up
        # with the skipped leftovers.
        all_combinations = list(itertools.combinations(range(n), r))
        random.shuffle(all_combinations)
        num_produced = 0
        feature_produced = [False] * n
        for i, comb in enumerate(all_combinations):
            if num_produced >= samples:
                break
            if all(map(feature_produced.__getitem__, comb)):
                continue
            for j in comb:
                feature_produced[j] = True
            num_produced += 1
            all_combinations[i] = None  # mark as already yielded
            yield comb
        for comb in all_combinations:
            if num_produced >= samples:
                break
            if comb is not None:
                # BUG FIX: the original never incremented num_produced here,
                # so this loop yielded ALL leftovers instead of stopping at
                # `samples`.
                num_produced += 1
                yield comb
    else:
        # Sparse sampling: rejection-sample random distinct combinations.
        already_produced = set()
        feature_produced = [False] * n
        while len(already_produced) < samples:
            comb = tuple(sorted(random.sample(range(n), r)))
            # BUG FIX: the original tested `already_produced` (a set of
            # tuples) in the coverage clauses, which made the preference for
            # unseen features a no-op (and sets have no __getitem__). Use the
            # boolean coverage list, mirroring the dense branch.
            if (comb not in already_produced
                    and (all(feature_produced)
                         or not all(map(feature_produced.__getitem__,
                                        comb)))):
                already_produced.add(comb)
                for i in comb:
                    feature_produced[i] = True
                yield comb


def apply_to_posterior_and_prior(dataset, feature_idx, prior_distributions,
                                 accuracies, fun):
    """Apply `fun(posterior, prior)` for one feature and map results to rows.

    Rows are bucketed by the values of every OTHER feature; `fun` is called
    once per bucket with that bucket's empirical (accuracy-adjusted)
    posterior and the feature's prior. The per-row result is looked up by
    the row's bucket key and its own feature value.

    :param feature_idx: one-element sequence holding the feature index
    :returns: list of `fun` results, one per dataset row
    """
    num_features = len(dataset[0])
    assert all(len(row) == num_features for row in dataset)
    feature_idx = feature_idx[0]
    buckets = collections.defaultdict(list)
    bucket_map = []
    for row in dataset:
        key = tuple(row[i] for i in range(num_features) if not i == feature_idx)
        buckets[key].append(row[feature_idx])
        bucket_map.append((key, row[feature_idx]))
    bucket_values = {
        key: fun(
            calculate_distribution(bucket,
                                   accuracy=accuracies[feature_idx],
                                   feature_distribution=prior_distributions[feature_idx]),
            prior_distributions[feature_idx])
        for key, bucket in buckets.items()}
    return [bucket_values[post_key][val] for post_key, val in bucket_map]


def find_kls_for_features(dataset, feature_is, feature_distributions, accuracies):
    """Find the KL divergence of feature values against the prior.

    We find the true distribution of the features taking into account
    the accuracy. We then compute the KL divergence.

    :returns: one tuple per feature in `feature_is`, holding the per-row KL
        divergence of that row's bucket posterior from the feature's prior.
    """
    num_features = len(dataset[0])
    assert all(len(row) == num_features for row in dataset)
    # One bucket dict per 'unknown' feature, keyed by the 'known' features.
    buckets = [collections.defaultdict(list) for _ in range(len(feature_is))]
    bucket_map = [[] for _ in range(len(feature_is))]
    for row in dataset:
        key = tuple(row[i] for i in range(num_features) if i not in feature_is)
        for i, j in enumerate(feature_is):
            buckets[i][key].append(row[j])
            bucket_map[i].append(key)
    bucket_kls = [
        {
            key: calculate_kl(
                calculate_distribution(bucket,
                                       accuracy=accuracies[feature_is[i]],
                                       feature_distribution=feature_distributions[feature_is[i]]),
                feature_distributions[feature_is[i]])
            for key, bucket in feature_buckets.items()}
        for i, feature_buckets in enumerate(buckets)]
    return [tuple(map(bucket_kls[i].__getitem__, bucket_map[i]))
            for i in range(len(feature_is))]


def calculate_kl(p, q):
    """Calculate D_KL(P || Q) (the KL-divergence) in bits.

    D_KL(P || Q) is the `information gained when one revises one's
    beliefs from the prior probability distribution Q to the posterior
    probability distribution P`. (Wikipedia, Kullback-Leibler divergence)

    `p` and `q` are both dictionaries mapping some hashable to a number.
    It is assumed that they are both normalised: their values should add
    up to 1. `q` must not have any 0 values unless the corresponding `p`
    value is also 0.
    """
    return sum(pk * math.log2(pk / q[k])
               for k, pk in p.items()
               if pk > 0)


def calculate_prob_change(p, q):
    """Calculate the change in probability for each element of the posterior compared to the prior."""
    # NOTE(review): function body truncated in the source chunk.
trip_quad.py
Source:trip_quad.py
...181 if len(myindex3)==0:182 continue183 myindex=myindex1+myindex2+myindex3184 original_img=cv2.imread(image_path[n], cv2.IMREAD_UNCHANGED)185 original_feature=predictor.img2feature(original_img)186 occluded_feature=deepcopy(original_feature)187 for i in range(len(myindex)):188 fhi=int(loc_set[myindex[i]][7])189 fwi=int(loc_set[myindex[i]][8])190 191 # Gause occlusion192 occluded_feature[0][fhi][fwi]=0193 occluded_feature[0][fhi-1][fwi]=0.25*occluded_feature[0][fhi-1][fwi]194 occluded_feature[0][fhi][fwi-1]=0.25*occluded_feature[0][fhi][fwi-1]195 occluded_feature[0][fhi+1][fwi]=0.25*occluded_feature[0][fhi+1][fwi]196 occluded_feature[0][fhi][fwi+1]=0.25*occluded_feature[0][fhi][fwi+1]197 occluded_feature[0][fhi-1][fwi-1]=0.375*occluded_feature[0][fhi-1][fwi-1]198 occluded_feature[0][fhi+1][fwi-1]=0.375*occluded_feature[0][fhi+1][fwi-1]199 occluded_feature[0][fhi+1][fwi+1]=0.375*occluded_feature[0][fhi+1][fwi+1]200 occluded_feature[0][fhi-1][fwi+1]=0.375*occluded_feature[0][fhi-1][fwi+1]201 202 occluded_feature[0][fhi+2][fwi]=0.625*occluded_feature[0][fhi+2][fwi]203 occluded_feature[0][fhi-2][fwi]=0.625*occluded_feature[0][fhi-2][fwi]204 occluded_feature[0][fhi][fwi-2]=0.625*occluded_feature[0][fhi][fwi-2]205 occluded_feature[0][fhi][fwi+2]=0.625*occluded_feature[0][fhi][fwi+2]206 occluded_feature[0][fhi-1][fwi-2]=0.75*occluded_feature[0][fhi-1][fwi-2]207 occluded_feature[0][fhi-1][fwi+2]=0.75*occluded_feature[0][fhi-1][fwi+2]208 occluded_feature[0][fhi+1][fwi-2]=0.75*occluded_feature[0][fhi+1][fwi-2]209 occluded_feature[0][fhi+1][fwi+2]=0.75*occluded_feature[0][fhi+1][fwi+2]210 occluded_feature[0][fhi-2][fwi-1]=0.75*occluded_feature[0][fhi-2][fwi-1]211 occluded_feature[0][fhi-2][fwi+1]=0.75*occluded_feature[0][fhi-2][fwi+1]212 occluded_feature[0][fhi+2][fwi-1]=0.75*occluded_feature[0][fhi+2][fwi-1]213 occluded_feature[0][fhi+2][fwi+1]=0.75*occluded_feature[0][fhi+2][fwi+1]214 occluded_feature[0][fhi-2][fwi-2]=0.875*occluded_feature[0][fhi-21][fwi-2]215 
occluded_feature[0][fhi-2][fwi+2]=0.875*occluded_feature[0][fhi-2][fwi+2]216 occluded_feature[0][fhi+2][fwi-2]=0.875*occluded_feature[0][fhi+2][fwi-2]217 occluded_feature[0][fhi+2][fwi+2]=0.875*occluded_feature[0][fhi+2][fwi+2] 218 # print(hi)219 # print(wi)220 # print(patch_size)221 drop=GetPossDecrease(original_feature,occluded_feature,int(cat))222 if drop!=-1:223 myscore.append(drop)224 trip_img_vc_score.append(float(np.sum(myscore))/img_num)225np.savez(trip_save_file,vc_score=trip_img_vc_score,vc=trip_img_vc)226for s in quad_img_vc:227 dot1=s.find('_')228 k1=int(s[:dot1])229 s2=s[dot1+1:]230 dot2=s2.find('_')231 k2=int(s2[:dot2])232 s3=s2[dot2+1:]233 dot3=s3.find('_')234 k3=int(s3[:dot3])235 s4=s3[dot3+1:]236 k4=int(s4)237 target1=centers[k1]238 index1=np.where(assignment==k1)[0]239 index1=disttresh(index1,target1)240 target2=centers[k2]241 index2=np.where(assignment==k2)[0]242 index2=disttresh(index2,target2)243 target3=centers[k3]244 index3=np.where(assignment==k3)[0]245 index3=disttresh(index3,target3)246 target4=centers[k4]247 index4=np.where(assignment==k4)[0]248 index4=disttresh(index4,target4)249 myscore=[]250 for n in range(0,img_num):251 myindex1=[]252 for i in range(len(index1)):253 if image_path[n]==originimage[index1[i]]:254 myindex1.append(index1[i])255 #myindex=OnlyTheClosest(myindex,target), or other preprocessing method256 myindex2=[]257 for i in range(len(index2)):258 if image_path[n]==originimage[index2[i]]:259 myindex2.append(index2[i])260 myindex3=[]261 for i in range(len(index3)):262 if image_path[n]==originimage[index3[i]]:263 myindex3.append(index3[i])264 myindex4=[]265 for i in range(len(index4)):266 if image_path[n]==originimage[index4[i]]:267 myindex4.append(index4[i])268 if len(myindex1)==0:269 continue270 if len(myindex2)==0:271 continue272 if len(myindex3)==0:273 continue274 if len(myindex4)==0:275 continue276 myindex=myindex1+myindex2+myindex3+myindex4277 original_img=cv2.imread(image_path[n], cv2.IMREAD_UNCHANGED)278 
original_feature=predictor.img2feature(original_img)279 occluded_feature=deepcopy(original_feature)280 for i in range(len(myindex)):281 fhi=int(loc_set[myindex[i]][7])282 fwi=int(loc_set[myindex[i]][8])283 284 # Gause occlusion285 occluded_feature[0][fhi][fwi]=0286 occluded_feature[0][fhi-1][fwi]=0.25*occluded_feature[0][fhi-1][fwi]287 occluded_feature[0][fhi][fwi-1]=0.25*occluded_feature[0][fhi][fwi-1]288 occluded_feature[0][fhi+1][fwi]=0.25*occluded_feature[0][fhi+1][fwi]289 occluded_feature[0][fhi][fwi+1]=0.25*occluded_feature[0][fhi][fwi+1]290 occluded_feature[0][fhi-1][fwi-1]=0.375*occluded_feature[0][fhi-1][fwi-1]291 occluded_feature[0][fhi+1][fwi-1]=0.375*occluded_feature[0][fhi+1][fwi-1]292 occluded_feature[0][fhi+1][fwi+1]=0.375*occluded_feature[0][fhi+1][fwi+1]...
DragFeature.js
Source:DragFeature.js
1/* Copyright (c) 2006-2013 by OpenLayers Contributors (see authors.txt for2 * full list of contributors). Published under the 2-clause BSD license.3 * See license.txt in the OpenLayers distribution or repository for the4 * full text of the license. */5/**6 * @requires OpenLayers/Control.js7 * @requires OpenLayers/Handler/Drag.js8 * @requires OpenLayers/Handler/Feature.js9 */10/**11 * Class: OpenLayers.Control.DragFeature12 * The DragFeature control moves a feature with a drag of the mouse. Create a13 * new control with the <OpenLayers.Control.DragFeature> constructor.14 *15 * Inherits From:16 * - <OpenLayers.Control>17 */18OpenLayers.Control.DragFeature = OpenLayers.Class(OpenLayers.Control, {19 /**20 * APIProperty: geometryTypes21 * {Array(String)} To restrict dragging to a limited set of geometry types,22 * send a list of strings corresponding to the geometry class names.23 */24 geometryTypes: null,25 26 /**27 * APIProperty: onStart28 * {Function} Define this function if you want to know when a drag starts.29 * The function should expect to receive two arguments: the feature30 * that is about to be dragged and the pixel location of the mouse.31 *32 * Parameters:33 * feature - {<OpenLayers.Feature.Vector>} The feature that is about to be34 * dragged.35 * pixel - {<OpenLayers.Pixel>} The pixel location of the mouse.36 */37 onStart: function(feature, pixel) {},38 /**39 * APIProperty: onDrag40 * {Function} Define this function if you want to know about each move of a41 * feature. The function should expect to receive two arguments: the42 * feature that is being dragged and the pixel location of the mouse.43 *44 * Parameters:45 * feature - {<OpenLayers.Feature.Vector>} The feature that was dragged.46 * pixel - {<OpenLayers.Pixel>} The pixel location of the mouse.47 */48 onDrag: function(feature, pixel) {},49 /**50 * APIProperty: onComplete51 * {Function} Define this function if you want to know when a feature is52 * done dragging. 
The function should expect to receive two arguments:53 * the feature that is being dragged and the pixel location of the54 * mouse.55 *56 * Parameters:57 * feature - {<OpenLayers.Feature.Vector>} The feature that was dragged.58 * pixel - {<OpenLayers.Pixel>} The pixel location of the mouse.59 */60 onComplete: function(feature, pixel) {},61 /**62 * APIProperty: onEnter63 * {Function} Define this function if you want to know when the mouse64 * goes over a feature and thereby makes this feature a candidate65 * for dragging.66 *67 * Parameters:68 * feature - {<OpenLayers.Feature.Vector>} The feature that is ready69 * to be dragged.70 */71 onEnter: function(feature) {},72 /**73 * APIProperty: onLeave74 * {Function} Define this function if you want to know when the mouse75 * goes out of the feature that was dragged.76 *77 * Parameters:78 * feature - {<OpenLayers.Feature.Vector>} The feature that was dragged.79 */80 onLeave: function(feature) {},81 /**82 * APIProperty: documentDrag83 * {Boolean} If set to true, mouse dragging will continue even if the84 * mouse cursor leaves the map viewport. 
Default is false.85 */86 documentDrag: false,87 88 /**89 * Property: layer90 * {<OpenLayers.Layer.Vector>}91 */92 layer: null,93 94 /**95 * Property: feature96 * {<OpenLayers.Feature.Vector>}97 */98 feature: null,99 /**100 * Property: dragCallbacks101 * {Object} The functions that are sent to the drag handler for callback.102 */103 dragCallbacks: {},104 /**105 * Property: featureCallbacks106 * {Object} The functions that are sent to the feature handler for callback.107 */108 featureCallbacks: {},109 110 /**111 * Property: lastPixel112 * {<OpenLayers.Pixel>}113 */114 lastPixel: null,115 /**116 * Constructor: OpenLayers.Control.DragFeature117 * Create a new control to drag features.118 *119 * Parameters:120 * layer - {<OpenLayers.Layer.Vector>} The layer containing features to be121 * dragged.122 * options - {Object} Optional object whose properties will be set on the123 * control.124 */125 initialize: function(layer, options) {126 OpenLayers.Control.prototype.initialize.apply(this, [options]);127 this.layer = layer;128 this.handlers = {129 drag: new OpenLayers.Handler.Drag(130 this, OpenLayers.Util.extend({131 down: this.downFeature,132 move: this.moveFeature,133 up: this.upFeature,134 out: this.cancel,135 done: this.doneDragging136 }, this.dragCallbacks), {137 documentDrag: this.documentDrag138 }139 ),140 feature: new OpenLayers.Handler.Feature(141 this, this.layer, OpenLayers.Util.extend({142 // 'click' and 'clickout' callback are for the mobile143 // support: no 'over' or 'out' in touch based browsers.144 click: this.clickFeature,145 clickout: this.clickoutFeature,146 over: this.overFeature,147 out: this.outFeature148 }, this.featureCallbacks),149 {geometryTypes: this.geometryTypes}150 )151 };152 },153 /**154 * Method: clickFeature155 * Called when the feature handler detects a click-in on a feature.156 *157 * Parameters:158 * feature - {<OpenLayers.Feature.Vector>}159 */160 clickFeature: function(feature) {161 if (this.handlers.feature.touch && !this.over && 
this.overFeature(feature)) {162 this.handlers.drag.dragstart(this.handlers.feature.evt);163 // to let the events propagate to the feature handler (click callback)164 this.handlers.drag.stopDown = false;165 }166 },167 /**168 * Method: clickoutFeature169 * Called when the feature handler detects a click-out on a feature.170 *171 * Parameters:172 * feature - {<OpenLayers.Feature.Vector>}173 */174 clickoutFeature: function(feature) {175 if (this.handlers.feature.touch && this.over) {176 this.outFeature(feature);177 this.handlers.drag.stopDown = true;178 }179 },180 /**181 * APIMethod: destroy182 * Take care of things that are not handled in superclass183 */184 destroy: function() {185 this.layer = null;186 OpenLayers.Control.prototype.destroy.apply(this, []);187 },188 /**189 * APIMethod: activate190 * Activate the control and the feature handler.191 * 192 * Returns:193 * {Boolean} Successfully activated the control and feature handler.194 */195 activate: function() {196 return (this.handlers.feature.activate() &&197 OpenLayers.Control.prototype.activate.apply(this, arguments));198 },199 /**200 * APIMethod: deactivate201 * Deactivate the control and all handlers.202 * 203 * Returns:204 * {Boolean} Successfully deactivated the control.205 */206 deactivate: function() {207 // the return from the handlers is unimportant in this case208 this.handlers.drag.deactivate();209 this.handlers.feature.deactivate();210 this.feature = null;211 this.dragging = false;212 this.lastPixel = null;213 OpenLayers.Element.removeClass(214 this.map.viewPortDiv, this.displayClass + "Over"215 );216 return OpenLayers.Control.prototype.deactivate.apply(this, arguments);217 },218 /**219 * Method: overFeature220 * Called when the feature handler detects a mouse-over on a feature.221 * This activates the drag handler.222 *223 * Parameters:224 * feature - {<OpenLayers.Feature.Vector>} The selected feature.225 *226 * Returns:227 * {Boolean} Successfully activated the drag handler.228 */229 overFeature: 
function(feature) {230 var activated = false;231 if(!this.handlers.drag.dragging) {232 this.feature = feature;233 this.handlers.drag.activate();234 activated = true;235 this.over = true;236 OpenLayers.Element.addClass(this.map.viewPortDiv, this.displayClass + "Over");237 this.onEnter(feature);238 } else {239 if(this.feature.id == feature.id) {240 this.over = true;241 } else {242 this.over = false;243 }244 }245 return activated;246 },247 /**248 * Method: downFeature249 * Called when the drag handler detects a mouse-down.250 *251 * Parameters:252 * pixel - {<OpenLayers.Pixel>} Location of the mouse event.253 */254 downFeature: function(pixel) {255 this.lastPixel = pixel;256 this.onStart(this.feature, pixel);257 },258 /**259 * Method: moveFeature260 * Called when the drag handler detects a mouse-move. Also calls the261 * optional onDrag method.262 * 263 * Parameters:264 * pixel - {<OpenLayers.Pixel>} Location of the mouse event.265 */266 moveFeature: function(pixel) {267 var res = this.map.getResolution();268 this.feature.geometry.move(res * (pixel.x - this.lastPixel.x),269 res * (this.lastPixel.y - pixel.y));270 this.layer.drawFeature(this.feature);271 this.lastPixel = pixel;272 this.onDrag(this.feature, pixel);273 },274 /**275 * Method: upFeature276 * Called when the drag handler detects a mouse-up.277 * 278 * Parameters:279 * pixel - {<OpenLayers.Pixel>} Location of the mouse event.280 */281 upFeature: function(pixel) {282 if(!this.over) {283 this.handlers.drag.deactivate();284 }285 },286 /**287 * Method: doneDragging288 * Called when the drag handler is done dragging.289 *290 * Parameters:291 * pixel - {<OpenLayers.Pixel>} The last event pixel location. 
If this event292 * came from a mouseout, this may not be in the map viewport.293 */294 doneDragging: function(pixel) {295 this.onComplete(this.feature, pixel);296 },297 /**298 * Method: outFeature299 * Called when the feature handler detects a mouse-out on a feature.300 *301 * Parameters:302 * feature - {<OpenLayers.Feature.Vector>} The feature that the mouse left.303 */304 outFeature: function(feature) {305 if(!this.handlers.drag.dragging) {306 this.over = false;307 this.handlers.drag.deactivate();308 OpenLayers.Element.removeClass(309 this.map.viewPortDiv, this.displayClass + "Over"310 );311 this.onLeave(feature);312 this.feature = null;313 } else {314 if(this.feature.id == feature.id) {315 this.over = false;316 }317 }318 },319 320 /**321 * Method: cancel322 * Called when the drag handler detects a mouse-out (from the map viewport).323 */324 cancel: function() {325 this.handlers.drag.deactivate();326 this.over = false;327 },328 /**329 * Method: setMap330 * Set the map property for the control and all handlers.331 *332 * Parameters: 333 * map - {<OpenLayers.Map>} The control's map.334 */335 setMap: function(map) {336 this.handlers.drag.setMap(map);337 this.handlers.feature.setMap(map);338 OpenLayers.Control.prototype.setMap.apply(this, arguments);339 },340 CLASS_NAME: "OpenLayers.Control.DragFeature"...
ToolboxView.js
Source:ToolboxView.js
1define(function (require) {2 var featureManager = require('./featureManager');3 var zrUtil = require('zrender/core/util');4 var graphic = require('../../util/graphic');5 var Model = require('../../model/Model');6 var DataDiffer = require('../../data/DataDiffer');7 var listComponentHelper = require('../helper/listComponent');8 var textContain = require('zrender/contain/text');9 return require('../../echarts').extendComponentView({10 type: 'toolbox',11 render: function (toolboxModel, ecModel, api) {12 var group = this.group;13 group.removeAll();14 if (!toolboxModel.get('show')) {15 return;16 }17 var itemSize = +toolboxModel.get('itemSize');18 var featureOpts = toolboxModel.get('feature') || {};19 var features = this._features || (this._features = {});20 var featureNames = [];21 zrUtil.each(featureOpts, function (opt, name) {22 featureNames.push(name);23 });24 (new DataDiffer(this._featureNames || [], featureNames))25 .add(process)26 .update(process)27 .remove(zrUtil.curry(process, null))28 .execute();29 // Keep for diff.30 this._featureNames = featureNames;31 function process(newIndex, oldIndex) {32 var featureName = featureNames[newIndex];33 var oldName = featureNames[oldIndex];34 var featureOpt = featureOpts[featureName];35 var featureModel = new Model(featureOpt, toolboxModel, toolboxModel.ecModel);36 var feature;37 if (featureName && !oldName) { // Create38 if (isUserFeatureName(featureName)) {39 feature = {40 model: featureModel,41 onclick: featureModel.option.onclick,42 featureName: featureName43 };44 }45 else {46 var Feature = featureManager.get(featureName);47 if (!Feature) {48 return;49 }50 feature = new Feature(featureModel);51 }52 features[featureName] = feature;53 }54 else {55 feature = features[oldName];56 // If feature does not exsit.57 if (!feature) {58 return;59 }60 feature.model = featureModel;61 }62 if (!featureName && oldName) {63 feature.dispose && feature.dispose(ecModel, api);64 return;65 }66 if (!featureModel.get('show') || feature.unusable) 
{67 feature.remove && feature.remove(ecModel, api);68 return;69 }70 createIconPaths(featureModel, feature, featureName);71 featureModel.setIconStatus = function (iconName, status) {72 var option = this.option;73 var iconPaths = this.iconPaths;74 option.iconStatus = option.iconStatus || {};75 option.iconStatus[iconName] = status;76 // FIXME77 iconPaths[iconName] && iconPaths[iconName].trigger(status);78 };79 if (feature.render) {80 feature.render(featureModel, ecModel, api);81 }82 }83 function createIconPaths(featureModel, feature, featureName) {84 var iconStyleModel = featureModel.getModel('iconStyle');85 // If one feature has mutiple icon. they are orginaized as86 // {87 // icon: {88 // foo: '',89 // bar: ''90 // },91 // title: {92 // foo: '',93 // bar: ''94 // }95 // }96 var icons = feature.getIcons ? feature.getIcons() : featureModel.get('icon');97 var titles = featureModel.get('title') || {};98 if (typeof icons === 'string') {99 var icon = icons;100 var title = titles;101 icons = {};102 titles = {};103 icons[featureName] = icon;104 titles[featureName] = title;105 }106 var iconPaths = featureModel.iconPaths = {};107 zrUtil.each(icons, function (icon, iconName) {108 var normalStyle = iconStyleModel.getModel('normal').getItemStyle();109 var hoverStyle = iconStyleModel.getModel('emphasis').getItemStyle();110 var style = {111 x: -itemSize / 2,112 y: -itemSize / 2,113 width: itemSize,114 height: itemSize115 };116 var path = icon.indexOf('image://') === 0117 ? 
(118 style.image = icon.slice(8),119 new graphic.Image({style: style})120 )121 : graphic.makePath(122 icon.replace('path://', ''),123 {124 style: normalStyle,125 hoverStyle: hoverStyle,126 rectHover: true127 },128 style,129 'center'130 );131 graphic.setHoverStyle(path);132 if (toolboxModel.get('showTitle')) {133 path.__title = titles[iconName];134 path.on('mouseover', function () {135 path.setStyle({136 text: titles[iconName],137 textPosition: hoverStyle.textPosition || 'bottom',138 textFill: hoverStyle.fill || hoverStyle.stroke || '#000',139 textAlign: hoverStyle.textAlign || 'center'140 });141 })142 .on('mouseout', function () {143 path.setStyle({144 textFill: null145 });146 });147 }148 path.trigger(featureModel.get('iconStatus.' + iconName) || 'normal');149 group.add(path);150 path.on('click', zrUtil.bind(151 feature.onclick, feature, ecModel, api, iconName152 ));153 iconPaths[iconName] = path;154 });155 }156 listComponentHelper.layout(group, toolboxModel, api);157 // Render background after group is layout158 // FIXME159 listComponentHelper.addBackground(group, toolboxModel);160 // Adjust icon title positions to avoid them out of screen161 group.eachChild(function (icon) {162 var titleText = icon.__title;163 var hoverStyle = icon.hoverStyle;164 // May be background element165 if (hoverStyle && titleText) {166 var rect = textContain.getBoundingRect(167 titleText, hoverStyle.font168 );169 var offsetX = icon.position[0] + group.position[0];170 var offsetY = icon.position[1] + group.position[1] + itemSize;171 var needPutOnTop = false;172 if (offsetY + rect.height > api.getHeight()) {173 hoverStyle.textPosition = 'top';174 needPutOnTop = true;175 }176 var topOffset = needPutOnTop ? 
(-5 - rect.height) : (itemSize + 8);177 if (offsetX + rect.width / 2 > api.getWidth()) {178 hoverStyle.textPosition = ['100%', topOffset];179 hoverStyle.textAlign = 'right';180 }181 else if (offsetX - rect.width / 2 < 0) {182 hoverStyle.textPosition = [0, topOffset];183 hoverStyle.textAlign = 'left';184 }185 }186 });187 },188 remove: function (ecModel, api) {189 zrUtil.each(this._features, function (feature) {190 feature.remove && feature.remove(ecModel, api);191 });192 this.group.removeAll();193 },194 dispose: function (ecModel, api) {195 zrUtil.each(this._features, function (feature) {196 feature.dispose && feature.dispose(ecModel, api);197 });198 }199 });200 function isUserFeatureName(featureName) {201 return featureName.indexOf('my') === 0;202 }...
Using AI Code Generation
1var wpt = require('wpt');2var wpt = new WebPageTest('www.webpagetest.org');3 if (err) return console.error(err);4 console.log(data);5});6var wpt = require('wpt');7var wpt = new WebPageTest('www.webpagetest.org');8 if (err) return console.error(err);9 console.log(data);10});11var wpt = require('wpt');12var wpt = new WebPageTest('www.webpagetest.org');13 if (err) return console.error(err);14 console.log(data);15});16var wpt = require('wpt');17var wpt = new WebPageTest('www.webpagetest.org');18 if (err) return console.error(err);19 console.log(data);20});21var wpt = require('wpt');22var wpt = new WebPageTest('www.webpagetest.org');23 if (err) return console.error(err);24 console.log(data);25});26var wpt = require('wpt');27var wpt = new WebPageTest('www.webpagetest.org');28 if (err) return console.error(err);29 console.log(data);30});31var wpt = require('wpt');32var wpt = new WebPageTest('www.webpagetest.org');33 if (err) return console.error(err);
Using AI Code Generation
1var wpt = require('wpt');2wpt.feature(function(data) {3 console.log(data);4});5var wpt = require('wpt');6wpt.feature(function(data) {7 console.log(data);8});9var wpt = require('wpt');10wpt.feature(function(data) {11 console.log(data);12});13var wpt = require('wpt');14wpt.feature(function(data) {15 console.log(data);16});17var wpt = require('wpt');18wpt.feature(function(data) {19 console.log(data);20});21var wpt = require('wpt');22wpt.feature(function(data) {23 console.log(data);24});25var wpt = require('wpt');26wpt.feature(function(data) {27 console.log(data);28});29var wpt = require('wpt');30wpt.feature(function(data) {31 console.log(data);32});33var wpt = require('wpt');34wpt.feature(function(data) {35 console.log(data);36});37var wpt = require('wpt');38wpt.feature(function(data) {39 console.log(data);40});41var wpt = require('wpt');42wpt.feature(function(data) {43 console.log(data);44});45var wpt = require('wpt');46wpt.feature(function(data) {47 console.log(data);48});49var wpt = require('wpt');50wpt.feature(function(data) {51 console.log(data);52});
Using AI Code Generation
1var wpt = require('webpagetest');2var test = wpt('APIKEY');3 if (err) return console.error(err);4 test.getTestResults(data.data.testId, function(err, data) {5 if (err) return console.error(err);6 console.log(data);7 });8});9 at Request._callback (/home/ubuntu/node_modules/webpagetest/lib/webpagetest.js:81:7)10 at Request.self.callback (/home/ubuntu/node_modules/webpagetest/node_modules/request/request.js:123:22)11 at Request.emit (events.js:107:17)12 at Request.<anonymous> (/home/ubuntu/node_modules/webpagetest/node_modules/request/request.js:1047:14)13 at Request.emit (events.js:107:17)14 at IncomingMessage.<anonymous> (/home/ubuntu/node_modules/webpagetest/node_modules/request/request.js:986:12)15 at IncomingMessage.emit (events.js:107:17)16 at process._tickCallback (node.js:415:13)
Using AI Code Generation
1var wptools = require('wptools');2var page = wptools.page('Albert Einstein');3page.get(function(err, resp) {4 console.log(resp);5});6var wptools = require('wptools');7var page = wptools.page('Albert Einstein');8page.get(function(err, resp) {9 console.log(resp);10});11var wptools = require('wptools');12var page = wptools.page('Albert Einstein');13page.get(function(err, resp) {14 console.log(resp);15});16var wptools = require('wptools');17var page = wptools.page('Albert Einstein');18page.get(function(err, resp) {19 console.log(resp);20});21var wptools = require('wptools');22var page = wptools.page('Albert Einstein');23page.get(function(err, resp) {24 console.log(resp);25});26var wptools = require('wptools');27var page = wptools.page('Albert Einstein');28page.get(function(err, resp) {29 console.log(resp);30});31var wptools = require('wptools');32var page = wptools.page('Albert Einstein');33page.get(function(err, resp) {34 console.log(resp);35});36var wptools = require('wptools');37var page = wptools.page('Albert Einstein');38page.get(function(err, resp) {39 console.log(resp);40});41var wptools = require('wptools');42var page = wptools.page('Albert Einstein');43page.get(function(err, resp) {44 console.log(resp);45});46var wptools = require('wptools');47var page = wptools.page('Albert
Using AI Code Generation
1var wptools = require('wptools');2var feature = wptools.feature('London');3feature.get(function(err, resp) {4 console.log("err: " + err);5 console.log("resp: " + resp);6});7var wptools = require('wptools');8var filePath = path.join(directoryPath, fileName);9var filePath = directoryPath + fileName;10var filePath = path.join(directoryPath + fileName);11var filePath = directoryPath + "/" + fileName;12var filePath = path.join(directoryPath, "/", fileName);13var filePath = path.join(directoryPath, "\\", fileName);14var filePath = path.join(directoryPath, "\\", fileName);15var filePath = path.join(directoryPath, "\\", fileName);16var filePath = path.join(directoryPath, "\\", fileName);
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 automation test minutes FREE!