How to use _supported_types method in pandera

Best Python code snippets using pandera

The snippets below show how _supported_types is defined and used: first in pandera's own check_utils.py, then in two unrelated projects (a TensorFlow test suite and a ClearVolume serializer) that use the same name for the same idea of enumerating the data types an implementation supports.

check_utils.py

Source: check_utils.py (GitHub)


...11 ("multiindex_types", Tuple[type, ...]),12 ),13)14@lru_cache(maxsize=None)15def _supported_types():16 # pylint: disable=import-outside-toplevel17 table_types = [pd.DataFrame]18 field_types = [pd.Series]19 index_types = [pd.Index]20 multiindex_types = [pd.MultiIndex]21 try:22 import databricks.koalas as ks23 table_types.append(ks.DataFrame)24 field_types.append(ks.Series)25 index_types.append(ks.Index)26 multiindex_types.append(ks.MultiIndex)27 except ImportError:28 pass29 try: # pragma: no cover30 import modin.pandas as mpd31 table_types.append(mpd.DataFrame)32 field_types.append(mpd.Series)33 index_types.append(mpd.Index)34 multiindex_types.append(mpd.MultiIndex)35 except ImportError:36 pass37 try:38 import dask.dataframe as dd39 table_types.append(dd.DataFrame)40 field_types.append(dd.Series)41 index_types.append(dd.Index)42 except ImportError:43 pass44 return SupportedTypes(45 tuple(table_types),46 tuple(field_types),47 tuple(index_types),48 tuple(multiindex_types),49 )50def is_table(obj):51 """Verifies whether an object is table-like.52 Where a table is a 2-dimensional data matrix of rows and columns, which53 can be indexed in multiple different ways.54 """55 return isinstance(obj, _supported_types().table_types)56def is_field(obj):57 """Verifies whether an object is field-like.58 Where a field is a columnar representation of data in a table-like59 data structure.60 """61 return isinstance(obj, _supported_types().field_types)62def is_index(obj):63 """Verifies whether an object is a table index."""64 return isinstance(obj, _supported_types().index_types)65def is_multiindex(obj):66 """Verifies whether an object is a multi-level table index."""67 return isinstance(obj, _supported_types().multiindex_types)68def is_supported_check_obj(obj):69 """Verifies whether an object is table- or field-like."""70 return is_table(obj) or is_field(obj)71def prepare_series_check_output(72 check_obj: Union[pd.Series, pd.DataFrame],73 check_output: pd.Series,74 ignore_na: bool = True,75 n_failure_cases: Optional[int] = None,76) -> Tuple[pd.Series, pd.Series]:77 """Prepare the check output and failure cases for a Series check output.78 check_obj can be a dataframe, since a check function can potentially return79 a Series resulting from applying some check function that outputs a Series.80 """81 if ignore_na:...


aggregate_ops_test.py

Source: aggregate_ops_test.py (GitHub)


...
  # after which it adds the remaining (N - M) tensors 8 at a time in a loop.
  # Test N in [1, 10] so we check each special-case from 1 to 9 and one
  # iteration of the loop.
  _MAX_N = 10

  def _supported_types(self):
    if test.is_gpu_available():
      return [dtypes.float16, dtypes.float32, dtypes.float64, dtypes.complex64,
              dtypes.complex128]
    return [dtypes.int8, dtypes.int16, dtypes.int32, dtypes.int64,
            dtypes.float16, dtypes.float32, dtypes.float64, dtypes.complex64,
            dtypes.complex128]

  def _buildData(self, shape, dtype):
    data = np.random.randn(*shape).astype(dtype.as_numpy_dtype)
    # For complex types, add an index-dependent imaginary component so we can
    # tell we got the right value.
    if dtype.is_complex:
      return data + 10j * data
    return data

  def testAddN(self):
    np.random.seed(12345)
    with self.test_session(use_gpu=True) as sess:
      for dtype in self._supported_types():
        for count in range(1, self._MAX_N + 1):
          data = [self._buildData((2, 2), dtype) for _ in range(count)]
          actual = sess.run(math_ops.add_n(data))
          expected = np.sum(np.vstack(
              [np.expand_dims(d, 0) for d in data]), axis=0)
          tol = 5e-3 if dtype == dtypes.float16 else 5e-7
          self.assertAllClose(expected, actual, rtol=tol, atol=tol)

  def testUnknownShapes(self):
    np.random.seed(12345)
    with self.test_session(use_gpu=True) as sess:
      for dtype in self._supported_types():
        data = self._buildData((2, 2), dtype)
        for count in range(1, self._MAX_N + 1):
          data_ph = array_ops.placeholder(dtype=dtype)
          actual = sess.run(math_ops.add_n([data_ph] * count), {data_ph: data})
          expected = np.sum(np.vstack([np.expand_dims(data, 0)] * count),
                            axis=0)
          tol = 5e-3 if dtype == dtypes.float16 else 5e-7
          self.assertAllClose(expected, actual, rtol=tol, atol=tol)

if __name__ == "__main__":
...
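The pattern here is worth isolating: the test enumerates a different list of supported dtypes depending on a runtime capability probe (GPU availability), and every test case then loops over that list. A minimal stand-alone sketch of the same idea, with the probe replaced by a hypothetical flag rather than TensorFlow's test.is_gpu_available():

# Hypothetical capability flag standing in for test.is_gpu_available().
GPU_AVAILABLE = False

def _supported_types():
    # Float and complex kernels exist on both devices in the test above.
    common = ["float16", "float32", "float64", "complex64", "complex128"]
    if GPU_AVAILABLE:
        return common
    # The CPU path additionally covers the integer dtypes.
    return ["int8", "int16", "int32", "int64"] + common

# Each test case iterates over whatever the current environment supports.
for dtype in _supported_types():
    print("would test add_n with dtype:", dtype)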


_serialize.py

Source: _serialize.py (GitHub)


1"""2packages data into the raw format the clearvolume client expects3author: Martin Weigert4email: mweigert@mpi-cbg.de5"""6from __future__ import absolute_import7from __future__ import print_function8import numpy as np9import six10from six.moves import zip11DEFAULT_METADATA = {12 "index": 0,13 "time": 0,14 "channel": 0,15 "channelname": "python source",16 "viewmatrix": "1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1.",17 "dim": 3,18 "color": (1., 1., 1., 1.),19 "type": "UnsignedShort",20 "bytespervoxel": 2,21 "elementsize": 1,22 "voxelwidth": 1,23 "voxelheight": 1,24 "voxeldepth": 1,25 "realunit": 126}27_SUPPORTED_TYPES = {np.uint8: "UnsignedByte",28 np.uint16: "UnsignedShort"}29def _serialize_data(data, meta=DEFAULT_METADATA):30 """returns serialized version of data for clearvolume data viewer"""31 if not isinstance(data, np.ndarray):32 raise TypeError("data should be a numpy array (but is %s)" % type(data))33 if not data.dtype.type in _SUPPORTED_TYPES:34 raise ValueError("data type should be in (%s) (but is %s)" % (list(_SUPPORTED_TYPES.keys()), data.dtype))35 LenInt64 = len(np.int64(1).tostring())36 Ns = data.shape37 metaData = DEFAULT_METADATA.copy()38 # prepare header....39 metaData["type"] = _SUPPORTED_TYPES[data.dtype.type]40 for attrName, N in zip(["width", "height", "depth"], Ns[::-1]):41 metaData[attrName] = meta.get(attrName, N)42 for key, val in six.iteritems(meta):43 if key not in metaData:44 raise KeyError(" '%s' (= %s) as is an unknown property!" % (key, val))45 else:46 metaData[key] = val47 print(metaData)48 keyValPairs = [str(key) + ":" + str(val) for key, val in six.iteritems(metaData)]49 headerStr = ",".join(keyValPairs)50 headerStr = "[" + headerStr + "]"51 # headerStr = str(metaData).replace("{","[").replace("}","]").replace("'",'')#.replace(" ",'')52 headerLength = len(headerStr)53 dataStr = data.tostring()54 dataLength = len(dataStr)55 neededBufferLength = 3 * LenInt64 + headerLength + dataLength56 return "%s%s%s%s%s" % (np.int64(neededBufferLength).tostring(), np.int64(headerLength).tostring(), headerStr,57 np.int64(dataLength).tostring(), dataStr)58if __name__ == '__main__':59 Ns = [1, 2, 3]60 d = (123 * np.linspace(0, 200, np.prod(Ns))).reshape(Ns).astype(np.uint8)61 # dStr = _serialize_data(d,{"width": 5.,"color":"1. .5 .2 1."})62 dStr = _serialize_data(d, {"width": "5", "color": "1. .5 .2 1."})...


