Best Python code snippet using playwright-python
privacy_ledger.py
Source: privacy_ledger.py
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PrivacyLedger class for keeping a record of private queries."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections

import tensorflow as tf

from privacy.analysis import tensor_buffer
from privacy.optimizers import dp_query

nest = tf.contrib.framework.nest

SampleEntry = collections.namedtuple(  # pylint: disable=invalid-name
    'SampleEntry', ['population_size', 'selection_probability', 'queries'])

GaussianSumQueryEntry = collections.namedtuple(  # pylint: disable=invalid-name
    'GaussianSumQueryEntry', ['l2_norm_bound', 'noise_stddev'])


class PrivacyLedger(object):
  """Class for keeping a record of private queries.

  The PrivacyLedger keeps a record of all queries executed over a given dataset
  for the purpose of computing privacy guarantees.
  """

  def __init__(
      self,
      population_size,
      selection_probability,
      max_samples,
      max_queries):
    """Initialize the PrivacyLedger.

    Args:
      population_size: An integer (may be variable) specifying the size of the
        population.
      selection_probability: A float (may be variable) specifying the
        probability each record is included in a sample.
      max_samples: The maximum number of samples. An exception is thrown if
        more than this many samples are recorded.
      max_queries: The maximum number of queries. An exception is thrown if
        more than this many queries are recorded.
    """
    self._population_size = population_size
    self._selection_probability = selection_probability

    # The query buffer stores rows corresponding to GaussianSumQueryEntries.
    self._query_buffer = tensor_buffer.TensorBuffer(
        max_queries, [3], tf.float32, 'query')
    self._sample_var = tf.Variable(
        initial_value=tf.zeros([3]), trainable=False, name='sample')

    # The sample buffer stores rows corresponding to SampleEntries.
    self._sample_buffer = tensor_buffer.TensorBuffer(
        max_samples, [3], tf.float32, 'sample')
    self._sample_count = tf.Variable(
        initial_value=0.0, trainable=False, name='sample_count')
    self._query_count = tf.Variable(
        initial_value=0.0, trainable=False, name='query_count')

    try:
      # Newer versions of TF
      self._cs = tf.CriticalSection()
    except AttributeError:
      # Older versions of TF
      self._cs = tf.contrib.framework.CriticalSection()

  def record_sum_query(self, l2_norm_bound, noise_stddev):
    """Records that a query was issued.

    Args:
      l2_norm_bound: The maximum l2 norm of the tensor group in the query.
      noise_stddev: The standard deviation of the noise applied to the sum.

    Returns:
      An operation recording the sum query to the ledger.
    """
    def _do_record_query():
      with tf.control_dependencies([
          tf.assign(self._query_count, self._query_count + 1)]):
        return self._query_buffer.append(
            [self._sample_count, l2_norm_bound, noise_stddev])

    return self._cs.execute(_do_record_query)

  def finalize_sample(self):
    """Finalizes sample and records sample ledger entry."""
    with tf.control_dependencies([
        tf.assign(
            self._sample_var,
            [self._population_size,
             self._selection_probability,
             self._query_count])]):
      with tf.control_dependencies([
          tf.assign(self._sample_count, self._sample_count + 1),
          tf.assign(self._query_count, 0)]):
        return self._sample_buffer.append(self._sample_var)

  def _format_ledger(self, sample_array, query_array):
    """Converts underlying representation into a list of SampleEntries."""
    samples = []
    query_pos = 0
    sample_pos = 0
    for sample in sample_array:
      num_queries = int(sample[2])
      queries = []
      for _ in range(num_queries):
        query = query_array[query_pos]
        assert int(query[0]) == sample_pos
        queries.append(GaussianSumQueryEntry(*query[1:]))
        query_pos += 1
      samples.append(SampleEntry(sample[0], sample[1], queries))
      sample_pos += 1
    return samples

  def get_formatted_ledger(self, sess):
    """Gets the formatted query ledger.

    Args:
      sess: The tensorflow session in which the ledger was created.

    Returns:
      The query ledger as a list of SampleEntries.
    """
    sample_array = sess.run(self._sample_buffer.values)
    query_array = sess.run(self._query_buffer.values)
    return self._format_ledger(sample_array, query_array)

  def get_formatted_ledger_eager(self):
    """Gets the formatted query ledger.

    Returns:
      The query ledger as a list of SampleEntries.
    """
    sample_array = self._sample_buffer.values.numpy()
    query_array = self._query_buffer.values.numpy()
    return self._format_ledger(sample_array, query_array)


class QueryWithLedger(dp_query.DPQuery):
  """A class for DP queries that record events to a PrivacyLedger.

  QueryWithLedger should be the top-level query in a structure of queries that
  may include sum queries, nested queries, etc. It should simply wrap another
  query and contain a reference to the ledger. Any contained queries (including
  those contained in the leaves of a nested query) should also contain a
  reference to the same ledger object.

  For example usage, see privacy_ledger_test.py.
  """

  def __init__(self, query, ledger):
    """Initializes the QueryWithLedger.

    Args:
      query: The query whose events should be recorded to the ledger. Any
        subqueries (including those in the leaves of a nested query) should
        also contain a reference to the same ledger given here.
      ledger: A PrivacyLedger to which privacy events should be recorded.
    """
    self._query = query
    self._ledger = ledger

  def initial_global_state(self):
    """See base class."""
    return self._query.initial_global_state()

  def derive_sample_params(self, global_state):
    """See base class."""
    return self._query.derive_sample_params(global_state)

  def initial_sample_state(self, global_state, tensors):
    """See base class."""
    return self._query.initial_sample_state(global_state, tensors)

  def accumulate_record(self, params, sample_state, record):
    """See base class."""
    return self._query.accumulate_record(params, sample_state, record)

  def get_noised_result(self, sample_state, global_state):
    """Ensures sample is recorded to the ledger and returns noised result."""
    with tf.control_dependencies(nest.flatten(sample_state)):
      with tf.control_dependencies([self._ledger.finalize_sample()]):...
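The ledger's public surface is small: construct a PrivacyLedger, add record_sum_query and finalize_sample ops to the graph, then read the entries back with get_formatted_ledger. Below is a minimal graph-mode sketch, assuming the TF 1.x `privacy` package from the listing is importable as privacy.analysis.privacy_ledger; the numeric arguments (population size, selection probability, clip norm, noise stddev, buffer sizes) are illustrative placeholders, not recommended values.

# Usage sketch for the PrivacyLedger above (TF 1.x graph mode).
# Assumes the `privacy` package shown in the listing is on the path;
# all numeric values here are placeholders chosen for illustration.
import tensorflow as tf

from privacy.analysis.privacy_ledger import PrivacyLedger

ledger = PrivacyLedger(
    population_size=10000,
    selection_probability=0.01,
    max_samples=100,
    max_queries=100)

# One Gaussian sum query with clip norm 1.0 and noise stddev 4.0,
# then close out the sample so it becomes a SampleEntry.
record_op = ledger.record_sum_query(l2_norm_bound=1.0, noise_stddev=4.0)
finalize_op = ledger.finalize_sample()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(record_op)
    sess.run(finalize_op)
    # Returns a list of SampleEntry tuples, each carrying its queries.
    print(ledger.get_formatted_ledger(sess))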
context.py
Source: context.py
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import time

import flask
from sqlalchemy import event

from scoreboard import main
from scoreboard import models
from scoreboard import utils

app = main.get_app()

DEFAULT_CSP_POLICY = {
    'default-src': ["'self'"],
    'script-src': [
        "'self'",
        "'unsafe-eval'",  # Needed for Charts.js
    ],
    'img-src': [
        "'self'",
        'data:',
    ],
    'object-src': ["'none'"],
    'font-src': [
        "'self'",
        'fonts.gstatic.com',
    ],
    'style-src': [
        "'self'",
        'fonts.googleapis.com',
        "'unsafe-inline'",  # Needed for Charts.js
    ],
}

_CSP_POLICY_STRING = None


def get_csp_policy():
    global _CSP_POLICY_STRING
    if _CSP_POLICY_STRING is not None:
        return _CSP_POLICY_STRING
    if app.config.get('CSP_POLICY'):
        policy = app.config.get('CSP_POLICY')
    elif app.config.get('EXTEND_CSP_POLICY'):
        policy = collections.defaultdict(list)
        for k, v in DEFAULT_CSP_POLICY.items():
            policy[k] = v
        for k, v in app.config.get('EXTEND_CSP_POLICY').items():
            policy[k].extend(v)
    else:
        policy = DEFAULT_CSP_POLICY
    components = []
    for k, v in policy.items():
        sources = ' '.join(v)
        components.append(k + ' ' + sources)
    _CSP_POLICY_STRING = '; '.join(components)
    return _CSP_POLICY_STRING


# Setup flask.g
@app.before_request
def load_globals():
    """Prepopulate flask.g.* with properties."""
    try:
        del flask.g.user
    except AttributeError:
        pass
    try:
        del flask.g.team
    except AttributeError:
        pass
    if load_apikey():
        return
    if (app.config.get('SESSION_EXPIRATION_SECONDS') and
            flask.session.get('expires') and
            flask.session.get('expires') < time.time()):
        flask.session.clear()
    flask.g.uid = flask.session.get('user')
    flask.g.tid = flask.session.get('team')
    flask.g.admin = flask.session.get('admin') or False


def load_apikey():
    """Load flask.g.user, flask.g.uid from an API key."""
    try:
        key = flask.request.headers.get('X-SCOREBOARD-API-KEY')
        if not key or len(key) != 32:
            return
        user = models.User.get_by_api_key(key)
        if not user:
            return
        flask.g.user = user
        flask.g.uid = user.uid
        flask.g.admin = user.admin
        flask.g.tid = None
        return True
    except Exception:
        # Don't want any API key problems to block requests
        pass


# Add headers to responses
@app.after_request
def add_headers(response):
    """Add security-related headers to all outgoing responses."""
    h = response.headers
    h.setdefault('Content-Security-Policy', get_csp_policy())
    h.setdefault('X-Frame-Options', 'DENY')
    h.add('X-XSS-Protection', '1', mode='block')
    return response


@app.context_processor
def util_contexts():
    return dict(gametime=utils.GameTime)


_query_count = 0

if app.config.get('COUNT_QUERIES'):
    @event.listens_for(models.db.engine, 'before_cursor_execute')
    def receive_before_cursor_execute(
            conn, cursor, statement, parameters, context, executemany):
        global _query_count
        _query_count += 1

    @app.after_request
    def count_queries(response):
        global _query_count
        if _query_count > 0:
            app.logger.info('Request issued %d queries.', _query_count)
            _query_count = 0
        return response


def ensure_setup():
    if not app:...
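get_csp_policy() builds the Content-Security-Policy header by joining each directive with its space-separated sources and then joining directives with '; '. The self-contained sketch below replicates that merge-and-serialize logic outside Flask so the resulting header string is easy to inspect; the EXTEND policy shown is a hypothetical example, not part of the original module.

# Standalone sketch of the CSP merge/serialize logic in get_csp_policy()
# above, without the Flask app. The EXTEND policy is hypothetical.
import collections

DEFAULT_CSP_POLICY = {
    'default-src': ["'self'"],
    'script-src': ["'self'", "'unsafe-eval'"],
}

EXTEND_CSP_POLICY = {
    'script-src': ['cdn.example.com'],  # hypothetical extra source
    'connect-src': ["'self'"],
}


def build_csp_string(base, extend):
    # Copy the defaults, then append any extra sources per directive.
    policy = collections.defaultdict(list)
    for k, v in base.items():
        policy[k] = list(v)
    for k, v in extend.items():
        policy[k].extend(v)
    # Serialize as "<directive> <source> <source>; <directive> ..."
    return '; '.join(k + ' ' + ' '.join(v) for k, v in policy.items())


print(build_csp_string(DEFAULT_CSP_POLICY, EXTEND_CSP_POLICY))
# default-src 'self'; script-src 'self' 'unsafe-eval' cdn.example.com; connect-src 'self'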
query_delegate.py
Source: query_delegate.py
# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Feedback query delegate interfaces and implementation registry."""

import multiprocessing

import common
from autotest_lib.client.common_lib.feedback import client


# Mapping of query identifiers to delegate classes.
_query_delegate_registry = {}


class _QueryDelegate(object):
    """A base class for query delegates."""

    _query_count = multiprocessing.Value('d', 0)

    def __init__(self, test, dut, multiplexer, atomic=True):
        """Constructs the delegate.

        @param test: The name of the test.
        @param dut: The name of the DUT.
        @param multiplexer: Feedback request multiplexer object.
        @param atomic: Whether this is an atomic query.
        """
        super(_QueryDelegate, self).__init__()
        self.test = test
        self.dut = dut
        self._multiplexer = multiplexer
        self._atomic = atomic
        # Assign a unique query number.
        with self._query_count.get_lock():
            self._query_num = self._query_count.value
            self._query_count.value += 1

    def _process_request(self, req):
        """Submits a given request to the multiplexer for processing."""
        return self._multiplexer.process_request(req, self._query_num,
                                                 self._atomic)

    def prepare(self, **kwargs):
        """Delegate for a query's prepare() method."""
        return self._prepare_impl(**kwargs)

    def _prepare_impl(self, **kwargs):
        """Concrete implementation of the query's prepare() call."""
        raise NotImplementedError

    def validate(self, **kwargs):
        """Delegate for a query's validate() method.

        This clears the atomic sequence with the multiplexer to make sure it
        isn't blocked waiting for more requests from this query.
        """
        try:
            return self._validate_impl(**kwargs)
        finally:
            if self._atomic:
                self._multiplexer.end_atomic_seq(self._query_num)

    def _validate_impl(self, **kwargs):
        """Concrete implementation of the query's validate() call."""
        raise NotImplementedError


class OutputQueryDelegate(_QueryDelegate):
    """A base class for output query delegates."""


class InputQueryDelegate(_QueryDelegate):
    """A base class for input query delegates."""

    def emit(self):
        """Delegate for an input query's emit() method."""
        return self._emit_impl()

    def _emit_impl(self):
        """Concrete implementation of the query's emit() call."""
        raise NotImplementedError


def register_delegate_cls(query_id, delegate_cls):
    """Registers a delegate class with a given query identifier.

    @param query_id: Query identifier constant.
    @param delegate_cls: The class implementing a delegate for this query.
    """
    _query_delegate_registry[query_id] = delegate_cls


def get_delegate_cls(query_id):
    """Returns a query delegate class for a given query type.

    @param query_id: A query type identifier.

    @return A query delegate class.

    @raise ValueError: Unknown query type.
    @raise NotImplementedError: Query type not supported.
    """
    if query_id not in client.ALL_QUERIES:
        raise ValueError
    if query_id not in _query_delegate_registry:
        raise NotImplementedError...
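The part of this module that is easy to miss is how _QueryDelegate numbers its queries: a class-level multiprocessing.Value guarded by get_lock(), so the read-then-increment is atomic even when delegates are constructed concurrently. The short, self-contained sketch below isolates that counter pattern with illustrative names of its own; it does not depend on the autotest classes above.

# Sketch of the shared-counter pattern used by _QueryDelegate above.
# Class and attribute names here are illustrative, not from autotest.
import multiprocessing


class Numbered(object):
    # Shared counter; get_lock() makes read-then-increment atomic even
    # when instances are created concurrently (e.g. in forked workers).
    _counter = multiprocessing.Value('i', 0)

    def __init__(self):
        with self._counter.get_lock():
            self.seq_num = self._counter.value
            self._counter.value += 1


print([Numbered().seq_num for _ in range(5)])  # [0, 1, 2, 3, 4]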
OracleCache.py
Source: OracleCache.py
# License: CC-0

import hashlib
import json
import contextlib
import requests


class OracleCache():
    def __init__(self, oracle_uri):
        self._uri = oracle_uri
        self._sess = requests.Session()
        self._ora_cachefile = "ora_cache_" + hashlib.md5(oracle_uri.encode()).hexdigest() + ".json"
        try:
            with open(self._ora_cachefile) as f:
                self._cache = json.load(f)
        except (FileNotFoundError, json.decoder.JSONDecodeError):
            self._cache = { }
            self.write_cache()
        self._query_count = 0

    @property
    def query_count(self):
        return self._query_count

    def write_cache(self):
        with open(self._ora_cachefile, "w") as f:
            json.dump(self._cache, f, separators = (",", ":"))

    def _execute(self, query):
        self._query_count += 1
        response = self._sess.post(self._uri, headers = {
            "Content-Type": "application/json",
        }, data = json.dumps(query))
        if response.status_code == 200:
            response_data = response.json()
            return response_data
        else:
            raise Exception("Oracle responded with Turbogrütze: %s / %s" % (response, response.content))

    def execute(self, query):
        key = json.dumps(query, sort_keys = True, separators = (",", ":"))
        if key in self._cache:
            return self._cache[key]
        response = self._execute(query)
        self._cache[key] = response
        return response

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.write_cache()


if __name__ == "__main__":
    # This is how you use this. Be careful that you DO NOT hardcode the URI
    # (like shown below), but use the command line argument instead.
    with OracleCache("https://127.0.0.1:5000/oracle/pkcs7_padding") as oc:...
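The truncated __main__ block hints at the intended usage: open the cache as a context manager so write_cache() runs on exit, call execute() with a JSON-serializable query, and read query_count to see how many requests actually reached the oracle (cache hits are not counted). A hedged sketch follows; the URI and payload are placeholders, a real oracle service must be listening there, and the payload shape depends entirely on that service.

# Usage sketch for OracleCache. URI and payload are placeholders; the
# module above must be saved as OracleCache.py for this import to work.
from OracleCache import OracleCache

with OracleCache("https://127.0.0.1:5000/oracle/pkcs7_padding") as oc:
    query = {"ciphertext": "00" * 16}   # hypothetical payload
    first = oc.execute(query)           # goes to the oracle
    second = oc.execute(query)          # served from the on-disk cache
    assert first == second
    print("queries that hit the oracle:", oc.query_count)  # 1
# Leaving the with-block calls __exit__, which persists the cache to disk.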