Best Python code snippet using localstack_python
MysqlApi.py
Source: MysqlApi.py
1"""2# -*- coding: utf-8 -*-3# ===============================================================================4#5# Copyright (C) 2013/2017 Laurent Labatut / Laurent Champagnac6#7#8#9# This program is free software; you can redistribute it and/or10# modify it under the terms of the GNU General Public License11# as published by the Free Software Foundation; either version 212# of the License, or (at your option) any later version.13#14# This program is distributed in the hope that it will be useful,15# but WITHOUT ANY WARRANTY; without even the implied warranty of16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the17# GNU General Public License for more details.18#19# You should have received a copy of the GNU General Public License20# along with this program; if not, write to the Free Software21# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA22# ===============================================================================23"""24# noinspection PyUnresolvedReferences25import ujson26import logging27from contextlib import closing28from gevent.threading import Lock29from pysolmeters.Meters import Meters30from pysolmysql.Pool.mysql_pool import MysqlConnectionPool31logger = logging.getLogger(__name__)32class MysqlApi(object):33 """34 Mysql Api35 """36 # Lock37 POOL_LOCK = Lock()38 # Static pool instances (hash from config dict => MysqlConnectionPool)39 D_POOL_INSTANCES = dict()40 @classmethod41 def reset_pools(cls):42 """43 Reset all pools44 """45 with cls.POOL_LOCK:46 for s_hash, pool in cls.D_POOL_INSTANCES.items():47 logger.info("Closing pool, s_hash=%s", s_hash)48 pool.close_all()49 cls.D_POOL_INSTANCES = dict()50 @classmethod51 def _get_pool_hash(cls, conf_dict):52 """53 Get pool hash54 :param conf_dict: dict55 :type conf_dict: dict56 :return: str57 :rtype: str58 """59 s_hash = str(hash(ujson.dumps(conf_dict, sort_keys=True)))60 return s_hash61 @classmethod62 def _get_pool(cls, conf_dict):63 """64 Init static pool65 :param conf_dict: dict66 :type conf_dict: dict67 :return pysolmysql.Pool.mysql_pool.MysqlConnectionPool68 :rtype pysolmysql.Pool.mysql_pool.MysqlConnectionPool69 """70 # Hash71 s_hash = cls._get_pool_hash(conf_dict)72 # Alloc if needed73 if s_hash not in cls.D_POOL_INSTANCES:74 with cls.POOL_LOCK:75 if s_hash not in cls.D_POOL_INSTANCES:76 cls.D_POOL_INSTANCES[s_hash] = MysqlConnectionPool(conf_dict)77 logger.info("Allocated pool, s_hash=%s, pool.len=%s", s_hash, len(cls.D_POOL_INSTANCES))78 Meters.aii("k.db_pool.hash.cur")79 # Over80 return cls.D_POOL_INSTANCES[s_hash]81 @classmethod82 def _fix_type(cls, data):83 """84 Fix type85 :param data: data86 """87 if isinstance(data, bytearray):88 return data.decode("utf-8")89 else:90 return data91 @classmethod92 def exec_0(cls, conf_dict, statement):93 """94 Execute a sql statement, returning row affected.95 :param conf_dict: configuration dict96 :type conf_dict: dict97 :param statement: statement to execute98 :type statement: str99 :rtype: int100 :return rows affected101 """102 cnx = None103 try:104 cnx = cls._get_pool(conf_dict).connection_acquire()105 with closing(cnx.cursor()) as cur:106 cur.execute(statement)107 return cur.rowcount108 finally:109 cls._get_pool(conf_dict).connection_release(cnx)110 @classmethod111 def exec_n(cls, conf_dict, statement, fix_types=True):112 """113 Execute a sql statement, returning 0..N rows114 :param conf_dict: configuration dict115 :type conf_dict: dict116 :param statement: statement to execute117 :type statement: str118 :param fix_types: If true, fix data 
type119 :type fix_types: bool120 :return list of dict.121 :rtype list122 """123 cnx = None124 try:125 cnx = cls._get_pool(conf_dict).connection_acquire()126 with closing(cnx.cursor()) as cur:127 cur.execute(statement)128 rows = cur.fetchall()129 for row in rows:130 logger.debug("row=%s", row)131 for k, v in row.items():132 logger.debug("k=%s, %s, %s", k, type(v), v)133 if fix_types:134 row[k] = MysqlApi._fix_type(v)135 return rows136 finally:137 cls._get_pool(conf_dict).connection_release(cnx)138 @classmethod139 def exec_1(cls, conf_dict, statement, fix_types=True):140 """141 Execute a sql statement, returning 1 row.142 Method will fail if 1 row is not returned.143 :rtype: object144 :param conf_dict: configuration dict145 :type conf_dict: dict146 :param statement: statement to execute147 :type statement: str148 :param fix_types: If true, fix data type149 :type fix_types: bool150 :return dict151 :rtype dict152 """153 cnx = None154 try:155 cnx = cls._get_pool(conf_dict).connection_acquire()156 with closing(cnx.cursor()) as cur:157 cur.execute(statement)158 rows = cur.fetchall()159 for row in rows:160 logger.debug("row=%s", row)161 for k, v in row.items():162 logger.debug("k=%s, %s, %s", k, type(v), v)163 if fix_types:164 row[k] = MysqlApi._fix_type(v)165 if len(rows) != 1:166 raise Exception("Invalid row len, expecting 1, having={0}".format(len(rows)))167 return rows[0]168 finally:169 cls._get_pool(conf_dict).connection_release(cnx)170 @classmethod171 def exec_01(cls, conf_dict, statement, fix_types=True):172 """173 Execute a sql statement, returning 0 or 1 row.174 Method will fail if 0 or 1 row is not returned.175 :param conf_dict: configuration dict176 :type conf_dict: dict177 :param statement: statement to execute178 :type statement: str179 :param fix_types: If true, fix data type180 :type fix_types: bool181 :return dict, None182 :rtype dict, None183 """184 cnx = None185 try:186 cnx = cls._get_pool(conf_dict).connection_acquire()187 with closing(cnx.cursor()) as cur:188 cur.execute(statement)189 rows = cur.fetchall()190 for row in rows:191 logger.debug("row=%s", row)192 for k, v in row.items():193 logger.debug("k=%s, %s, %s", k, type(v), v)194 if fix_types:195 row[k] = MysqlApi._fix_type(v)196 if len(rows) == 0:197 return None198 elif len(rows) != 1:199 raise Exception("Invalid row len, expecting 1, having={0}".format(len(rows)))200 else:201 return rows[0]202 finally:203 cls._get_pool(conf_dict).connection_release(cnx)204 @classmethod205 def multi_n(cls, conf_dict, ar_statement):206 """207 Execute multiple sql statement, reading nothing from mysql.208 :type conf_dict: dict209 :param ar_statement: list of statements to execute (for instance, batch of insert or whatever)210 :type ar_statement: list211 """212 cnx = None213 try:214 cnx = cls._get_pool(conf_dict).connection_acquire()215 with closing(cnx.cursor()) as cur:216 for s in ar_statement:217 cur.execute(s)218 finally:...
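For context, a hypothetical call site might look like the sketch below. The conf_dict keys (host, port, database, user, password) and the import path are assumptions about pysolmysql's conventions, not something the snippet above confirms.

# Hypothetical usage sketch only: import path and conf_dict keys are assumed,
# adjust them to whatever your pysolmysql installation actually expects.
from pysolmysql.Mysql.MysqlApi import MysqlApi  # import path assumed

conf_dict = {
    "host": "127.0.0.1",
    "port": 3306,
    "database": "mydb",
    "user": "myuser",
    "password": "mypassword",
}

# 0..N rows, as a list of dict (bytearray values decoded to str by _fix_type)
d_rows = MysqlApi.exec_n(conf_dict, "SELECT user, host FROM mysql.user;")

# Exactly one row expected, raises otherwise
d_one = MysqlApi.exec_1(conf_dict, "SELECT NOW() AS now;")

# Statement with no result set, returns the affected row count
n = MysqlApi.exec_0(conf_dict, "UPDATE t SET v=1 WHERE id=2;")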
jsonrenderer.py
Source: jsonrenderer.py
1"""2Outputs JSON representation of data.3 4"""5# Python imports6import glujson as json7# Glu imports8from glu.render.baserenderer import BaseRenderer9from glu.platform_specifics import *10from glu.core.util import Url11def _default(obj):12 """13 Take a non-standard data type and return its string representation.14 15 This function is given to simplejson as a fall back for any types16 that it doesn't know how to render.17 18 @param obj: A non-standard object to be rendered for JSON.19 @type obj: object20 21 @return: String representation suitable for JSON.22 23 """24 return str(obj)25def _recursive_type_fixer(obj):26 """27 Convert unusual types to strings in recursive structures.28 Under GAE we cannot specify a default for the JSON encoder,29 which is very annoying. So this method is only called when30 we are running under GAE. It traverses the entire data structure31 and converts the types specified in FIX_TYPES to strings.32 """33 FIX_TYPES = [ Url ]34 if type(obj) in FIX_TYPES:35 return str(obj)36 if type(obj) is list:37 new_list = []38 for e in obj:39 new_list.append(_recursive_type_fixer(e))40 return new_list41 if type(obj) is dict:42 new_dict = {}43 for k, v in obj.items():44 if type(k) in FIX_TYPES:45 k = str(k)46 if type(v) in FIX_TYPES:47 v = str(v)48 else:49 v = _recursive_type_fixer(v)50 new_dict[k] = v51 return new_dict52 return obj53 54class JsonRenderer(BaseRenderer):55 """56 Class to render data as JSON.57 58 """59 CONTENT_TYPE = "application/json"60 def render(self, data, top_level=False):61 """62 Render the provided data for output.63 64 @param data: An object containing the data to be rendered.65 @param data: object66 67 @param top_level: Flag indicating whether this we are at the68 top level for output (this function is called69 recursively and therefore may not always find70 itself at the top level). This is important for71 some renderers, since they can insert any framing72 elements that might be required at the top level.73 However, for the JSON renderer this is just74 ignored.75 @param top_level: boolean76 77 @return: Output buffer with completed representation.78 @rtype: string79 80 """81 # simplejson can only handle some of the base Python datatypes.82 # Since we also have other types in the output dictionaries (URIs83 # for example), we need to provide a 'default' method, which84 # simplejson calls in case it doesn't know what to do.85 # Need to use our newly defined Url encoder, since otherwise86 # json wouldn't know how to encode a URL87 if PLATFORM == PLATFORM_GAE:88 # That doesn't seem to be supported when running in89 # GAE, though. So, in that case we first perform a very90 # manual fixup of the object, replacing all occurrances91 # of unusual types with their string representations.92 data = _recursive_type_fixer(data)93 out = json.dumps(data, sort_keys=True, indent=4)94 else:95 out = json.dumps(data, default=_default, sort_keys=True, indent=4)...
add_features.py
Source: add_features.py
import numpy as np
import pandas as pd
import os

from db import DB

#===============================================GET DATA===============================================

def fix_types(df, names, types):
    for i in range(len(names)):
        name = names[i]
        t = types[i]
        df[name] = df[name].astype(t)
    return df

raw_x = pd.read_csv('data/data_x.csv')                            # id, time, x, y, covs
raw_xs = pd.read_csv('data/data_xs.csv')                          # id, time, x, y, covs
raw_grid_xs = pd.read_csv('data/data_x_test_grid.csv')            # id, time, x, y, covs
raw_fine_grid_xs = pd.read_csv('data/data_x_test_fine_grid.csv')  # id, time, x, y, covs
sat_x = pd.read_csv('data/sat_data_x.csv')                        # id, time, x, y, covs

column_names = ['src', 'id', 'datetime', 'epoch', 'lat', 'lon', 'val']
column_types = [np.int, np.int, np.str, np.int, np.float64, np.float64, np.float64]

raw_x = fix_types(raw_x, column_names, column_types)
raw_xs = fix_types(raw_xs, column_names, column_types)
raw_grid_xs = fix_types(raw_grid_xs, column_names, column_types)
raw_fine_grid_xs = fix_types(raw_fine_grid_xs, column_names, column_types)
sat_x = fix_types(sat_x, column_names, column_types)

total_df = pd.concat([raw_x, raw_xs, raw_grid_xs, raw_fine_grid_xs, sat_x], axis=0)

#===============================================INSERT TO DB===============================================

db = DB(name='postgis_test', connect=True)
SCHEMA = 'orca'
LOCATION_TABLE_NAME = 'nips_locations'

schema = """
drop table if exists {schema}.{table};
create table {schema}.{table} (
    src integer,
    id integer,
    datetime timestamp,
    epoch integer,
    lat double precision,
    lon double precision,
...
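A minimal, self-contained illustration of what fix_types does is sketched below; the toy column names and builtin dtypes are illustrative only, not the script's real 'src, id, datetime, ...' columns.

# Sketch of fix_types on a toy frame (assumed toy data, builtin dtypes).
import pandas as pd

def fix_types(df, names, types):
    # Cast each named column to the matching dtype, in order.
    for name, t in zip(names, types):
        df[name] = df[name].astype(t)
    return df

toy = pd.DataFrame({"id": ["1", "2"], "val": ["3.5", "4.0"]})
toy = fix_types(toy, ["id", "val"], [int, float])
print(toy.dtypes)  # id -> int64, val -> float64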