Best Python code snippet using lisa_python
augeas_cfg.py
Source:augeas_cfg.py
# -*- coding: utf-8 -*-
"""
Manages configuration files via augeas

This module requires the ``augeas`` Python module.

.. _Augeas: http://augeas.net/

.. warning::

    Minimal installations of Debian and Ubuntu have been seen to have packaging
    bugs with python-augeas, causing the augeas module to fail to import. If
    the minion has the augeas module installed, but the functions in this
    execution module fail to run due to being unavailable, first restart the
    salt-minion service. If the problem persists past that, the following
    command can be run from the master to determine what is causing the import
    to fail:

    .. code-block:: bash

        salt minion-id cmd.run 'python -c "from augeas import Augeas"'

    For affected Debian/Ubuntu hosts, installing ``libpython2.7`` has been
    known to resolve the issue.
"""
from __future__ import absolute_import, print_function, unicode_literals

import logging

# Import python libs
import os
import re

# Import salt libs
import salt.utils.args
import salt.utils.data
import salt.utils.stringutils
from salt.exceptions import SaltInvocationError
from salt.ext import six
from salt.ext.six.moves import zip

# Make sure augeas python interface is installed
HAS_AUGEAS = False
try:
    from augeas import Augeas as _Augeas  # pylint: disable=no-name-in-module

    HAS_AUGEAS = True
except ImportError:
    pass

log = logging.getLogger(__name__)

# Define the module's virtual name
__virtualname__ = "augeas"

# Maps user-facing command aliases to the canonical Augeas method name.
METHOD_MAP = {
    "set": "set",
    "setm": "setm",
    "mv": "move",
    "move": "move",
    "ins": "insert",
    "insert": "insert",
    "rm": "remove",
    "remove": "remove",
}


def __virtual__():
    """
    Only run this module if the augeas python module is installed
    """
    if HAS_AUGEAS:
        return __virtualname__
    return (False, "Cannot load augeas_cfg module: augeas python module not installed")


def _recurmatch(path, aug):
    """
    Recursive generator providing the infrastructure for
    augtools print behavior.

    Yields ``(path, value)`` pairs for *path* and every node below it.

    This function is based on test_augeas.py from
    Harald Hoyer <harald@redhat.com> in the python-augeas
    repository
    """
    if path:
        clean_path = path.rstrip("/*")
        yield (clean_path, aug.get(path))

        for i in aug.match(clean_path + "/*"):
            i = i.replace("!", "\\!")  # escape some dirs
            for _match in _recurmatch(i, aug):
                yield _match


def _lstrip_word(word, prefix):
    """
    Return a copy of the string after the specified prefix was removed
    from the beginning of the string
    """
    if six.text_type(word).startswith(prefix):
        return six.text_type(word)[len(prefix):]
    return word


def _check_load_paths(load_path):
    """
    Checks the validity of the load_path, returns a sanitized version
    with invalid paths removed.

    Returns ``None`` when *load_path* is not a string or when no entry
    survives sanitization (absolute, existing directories only).
    """
    if load_path is None or not isinstance(load_path, six.string_types):
        return None

    _paths = []
    for _path in load_path.split(":"):
        if os.path.isabs(_path) and os.path.isdir(_path):
            _paths.append(_path)
        else:
            log.info("Invalid augeas_cfg load_path entry: %s removed", _path)

    if not _paths:
        return None
    return ":".join(_paths)


def execute(context=None, lens=None, commands=(), load_path=None):
    """
    Execute Augeas commands

    .. versionadded:: 2014.7.0

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.execute /files/etc/redis/redis.conf \\
        commands='["set bind 0.0.0.0", "set maxmemory 1G"]'

    context
        The Augeas context

    lens
        The Augeas lens to use

    commands
        The Augeas commands to execute

    .. versionadded:: 2016.3.0

    load_path
        A colon-separated list of directories that modules should be searched
        in. This is in addition to the standard load path and the directories
        in AUGEAS_LENS_LIB.
    """
    ret = {"retval": False}

    # Number of arguments each canonical method accepts.
    arg_map = {
        "set": (1, 2),
        "setm": (2, 3),
        "move": (2,),
        "insert": (3,),
        "remove": (1,),
    }

    def make_path(path):
        """
        Return correct path
        """
        if not context:
            return path

        if path.lstrip("/"):
            if path.startswith(context):
                return path
            path = path.lstrip("/")
            return os.path.join(context, path)
        else:
            return context

    load_path = _check_load_paths(load_path)

    flags = _Augeas.NO_MODL_AUTOLOAD if lens and context else _Augeas.NONE
    aug = _Augeas(flags=flags, loadpath=load_path)

    if lens and context:
        aug.add_transform(lens, re.sub("^/files", "", context))
        aug.load()

    for command in commands:
        # Pre-bind so the error message below is always defined, even
        # when command.split itself raises (replaces the old
        # `"arg" not in locals()` probe).
        arg = command
        try:
            # first part up to space is always the
            # command name (i.e.: set, move)
            cmd, arg = command.split(" ", 1)

            if cmd not in METHOD_MAP:
                ret["error"] = "Command {0} is not supported (yet)".format(cmd)
                return ret

            method = METHOD_MAP[cmd]
            nargs = arg_map[method]

            parts = salt.utils.args.shlex_split(arg)

            if len(parts) not in nargs:
                err = "{0} takes {1} args: {2}".format(method, nargs, parts)
                raise ValueError(err)

            if method == "set":
                path = make_path(parts[0])
                value = parts[1] if len(parts) == 2 else None
                args = {"path": path, "value": value}
            elif method == "setm":
                base = make_path(parts[0])
                sub = parts[1]
                value = parts[2] if len(parts) == 3 else None
                args = {"base": base, "sub": sub, "value": value}
            elif method == "move":
                path = make_path(parts[0])
                dst = parts[1]
                args = {"src": path, "dst": dst}
            elif method == "insert":
                label, where, path = parts
                if where not in ("before", "after"):
                    raise ValueError(
                        'Expected "before" or "after", not {0}'.format(where)
                    )
                path = make_path(path)
                args = {"path": path, "label": label, "before": where == "before"}
            elif method == "remove":
                path = make_path(parts[0])
                args = {"path": path}
        except ValueError as err:
            log.error(err)
            ret["error"] = (
                "Invalid formatted command, "
                "see debug log for details: {0}".format(arg)
            )
            return ret

        args = salt.utils.data.decode(args, to_str=True)
        log.debug("%s: %s", method, args)

        func = getattr(aug, method)
        func(**args)

    try:
        aug.save()
        ret["retval"] = True
    except IOError as err:
        ret["error"] = six.text_type(err)

        if lens and not lens.endswith(".lns"):
            ret["error"] += (
                '\nLenses are normally configured as "name.lns". '
                'Did you mean "{0}.lns"?'.format(lens)
            )

    aug.close()
    return ret


def get(path, value="", load_path=None):
    """
    Get a value for a specific augeas path

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.get /files/etc/hosts/1/ ipaddr

    path
        The path to get the value of

    value
        The optional value to get

    .. versionadded:: 2016.3.0

    load_path
        A colon-separated list of directories that modules should be searched
        in. This is in addition to the standard load path and the directories
        in AUGEAS_LENS_LIB.
    """
    load_path = _check_load_paths(load_path)

    aug = _Augeas(loadpath=load_path)
    ret = {}

    path = path.rstrip("/")
    if value:
        path += "/{0}".format(value.strip("/"))

    try:
        _match = aug.match(path)
    except RuntimeError as err:
        return {"error": six.text_type(err)}

    if _match:
        ret[path] = aug.get(path)
    else:
        ret[path] = ""  # node does not exist

    return ret


def setvalue(*args):
    """
    Set a value for a specific augeas path

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.setvalue /files/etc/hosts/1/canonical localhost

    This will set the first entry in /etc/hosts to localhost

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.setvalue /files/etc/hosts/01/ipaddr 192.168.1.1 \\
                 /files/etc/hosts/01/canonical test

    Adds a new host to /etc/hosts the ip address 192.168.1.1 and hostname test

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.setvalue prefix=/files/etc/sudoers/ \\
                 "spec[user = '%wheel']/user" "%wheel" \\
                 "spec[user = '%wheel']/host_group/host" 'ALL' \\
                 "spec[user = '%wheel']/host_group/command[1]" 'ALL' \\
                 "spec[user = '%wheel']/host_group/command[1]/tag" 'PASSWD' \\
                 "spec[user = '%wheel']/host_group/command[2]" '/usr/bin/apt-get' \\
                 "spec[user = '%wheel']/host_group/command[2]/tag" NOPASSWD

    Ensures that the following line is present in /etc/sudoers::

        %wheel ALL = PASSWD : ALL , NOPASSWD : /usr/bin/apt-get , /usr/bin/aptitude
    """
    load_path = None
    load_paths = [x for x in args if six.text_type(x).startswith("load_path=")]
    if load_paths:
        if len(load_paths) > 1:
            raise SaltInvocationError("Only one 'load_path=' value is permitted")
        else:
            load_path = load_paths[0].split("=", 1)[1]

    load_path = _check_load_paths(load_path)

    aug = _Augeas(loadpath=load_path)
    ret = {"retval": False}

    # Everything that is not a "prefix=" / "load_path=" keyword is a
    # path/value pair.
    tuples = [
        x
        for x in args
        if not six.text_type(x).startswith("prefix=")
        and not six.text_type(x).startswith("load_path=")
    ]
    prefix = [x for x in args if six.text_type(x).startswith("prefix=")]
    if prefix:
        if len(prefix) > 1:
            raise SaltInvocationError("Only one 'prefix=' value is permitted")
        else:
            prefix = prefix[0].split("=", 1)[1]

    if len(tuples) % 2 != 0:
        raise SaltInvocationError("Uneven number of path/value arguments")

    tuple_iter = iter(tuples)
    for path, value in zip(tuple_iter, tuple_iter):
        target_path = path
        if prefix:
            target_path = os.path.join(prefix.rstrip("/"), path.lstrip("/"))
        try:
            aug.set(target_path, six.text_type(value))
        except ValueError as err:
            ret["error"] = "Multiple values: {0}".format(err)

    try:
        aug.save()
        ret["retval"] = True
    except IOError as err:
        ret["error"] = six.text_type(err)
    return ret


def match(path, value="", load_path=None):
    """
    Get matches for path expression

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.match /files/etc/services/service-name ssh

    path
        The path to match

    value
        The value to match on

    .. versionadded:: 2016.3.0

    load_path
        A colon-separated list of directories that modules should be searched
        in. This is in addition to the standard load path and the directories
        in AUGEAS_LENS_LIB.
    """
    load_path = _check_load_paths(load_path)

    aug = _Augeas(loadpath=load_path)
    ret = {}

    try:
        matches = aug.match(path)
    except RuntimeError:
        return ret

    for _match in matches:
        if value and aug.get(_match) == value:
            ret[_match] = value
        elif not value:
            ret[_match] = aug.get(_match)
    return ret


def remove(path, load_path=None):
    """
    Remove a node and everything below it.

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.remove \\
        /files/etc/sysctl.conf/net.ipv4.conf.all.log_martians

    path
        The path to remove

    .. versionadded:: 2016.3.0

    load_path
        A colon-separated list of directories that modules should be searched
        in. This is in addition to the standard load path and the directories
        in AUGEAS_LENS_LIB.
    """
    load_path = _check_load_paths(load_path)

    aug = _Augeas(loadpath=load_path)
    ret = {"retval": False}
    # Pre-bind so the summary below never hits an unbound name when
    # aug.remove() itself raises (previously an UnboundLocalError).
    count = 0
    try:
        count = aug.remove(path)
        aug.save()
        if count == -1:
            ret["error"] = "Invalid node"
        else:
            ret["retval"] = True
    except (RuntimeError, IOError) as err:
        ret["error"] = six.text_type(err)

    ret["count"] = count

    return ret


def ls(path, load_path=None):  # pylint: disable=C0103
    """
    List the direct children of a node

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.ls /files/etc/passwd

    path
        The path to list

    .. versionadded:: 2016.3.0

    load_path
        A colon-separated list of directories that modules should be searched
        in. This is in addition to the standard load path and the directories
        in AUGEAS_LENS_LIB.
    """

    def _match(path):
        """ Internal match function """
        try:
            matches = aug.match(salt.utils.stringutils.to_str(path))
        except RuntimeError:
            return {}

        ret = {}
        for _ma in matches:
            ret[_ma] = aug.get(_ma)
        return ret

    load_path = _check_load_paths(load_path)

    aug = _Augeas(loadpath=load_path)

    path = path.rstrip("/") + "/"
    match_path = path + "*"

    matches = _match(match_path)
    ret = {}

    for key, value in six.iteritems(matches):
        name = _lstrip_word(key, path)
        if _match(key + "/*"):
            ret[name + "/"] = value  # has sub nodes, e.g. directory
        else:
            ret[name] = value
    return ret


def tree(path, load_path=None):
    """
    Returns recursively the complete tree of a node

    CLI Example:

    .. code-block:: bash

        salt '*' augeas.tree /files/etc/

    path
        The base of the recursive listing

    .. versionadded:: 2016.3.0

    load_path
        A colon-separated list of directories that modules should be searched
        in. This is in addition to the standard load path and the directories
        in AUGEAS_LENS_LIB.
    """
    load_path = _check_load_paths(load_path)

    aug = _Augeas(loadpath=load_path)

    path = path.rstrip("/") + "/"
    match_path = path
    # NOTE(review): the source was truncated here; the tail below is the
    # obvious pairing with _recurmatch() — confirm against upstream.
    return dict([i for i in _recurmatch(match_path, aug)])
load_data.py
Source:load_data.py
...49def load_path_sessions(basefolder,indices,analysis=True):50 datafolders = [path for path in [utils.directory_tree(basefolder,1)[i] for i in indices] if os.path.exists(path + '\\Analysis')]51 return load_pathlist(datafolders,analysis,parser.parse_session)5253def load_path(basefolder,folderslices=slice(None),analysis=True):54 datafolders = [path for path in utils.directory_tree(basefolder,1)[folderslices] if os.path.exists(path + '\\Analysis')]55 return load_pathlist(datafolders,analysis,parser.parse_session)56 57def annotate_jpak345(sessions):58 sessions[0].session_type = 'habituation'59 for i in range(5):60 sessions[i].session_type += ' (lowered)'61 sessions[9].session_type += ' (servos)'62 for i in range(10,14):63 sessions[i].session_type += ' (5th step acw)'64 for i in range(16,18):65 sessions[i].session_type += ' (4th step cw)'66 for i in range(19,22):67 sessions[i].session_type += ' (both)'68 69# I think these indices were incorrect (ARK) 70 71def annotate_jpak678(sessions):72 sessions[0].session_type = 'habituation'73 for i in range(5,9):74 sessions[i].session_type += ' (4th step cw)'75 for i in range(9,12):76 sessions[i].session_type += ' (all free)'7778#def annotate_jpak678(sessions):79# sessions[0].session_type = 'habituation'80# for i in range(5,14):81# sessions[i].session_type += ' (4th step cw)'82# for i in range(16,18):83# sessions[i].session_type += ' (4th step cw)'84# for i in range(19,22):85# sessions[i].session_type += ' (both)'86 87 88#==============================================================================89# jpak03 = True90# if jpak03:91# #jpak03habituation = load_path('../Data/JPAK_03/2012_04_24-13_26/Analysis', 'jpak03habituation',False,False)92# jpak03learning1 = load_path('../Data/JPAK_03/2012_04_25-14_46/Analysis', 'jpak03learning1',False,False)93# jpak03learning2 = load_path('../Data/JPAK_03/2012_04_26-15_41/Analysis', 'jpak03learning2',False,False)94# jpak03learning3 = load_path('../Data/JPAK_03/2012_04_28-16_57/Analysis', 
'jpak03learning3',False,False)95# jpak03learning4 = load_path('../Data/JPAK_03/2012_04_29-15_13/Analysis', 'jpak03learning4',False,False)96# jpak03control1 = load_path('../Data/JPAK_03/2012_05_21-18_38/Analysis', 'jpak03control1')97# jpak03control2 = load_path('../Data/JPAK_03/2012_05_22-20_45/Analysis', 'jpak03control2')98# jpak03control3 = load_path('../Data/JPAK_03/2012_05_23-19_46/Analysis', 'jpak03control3')99# jpak03control4 = load_path('../Data/JPAK_03/2012_05_25-00_04/Analysis', 'jpak03control4')100# jpak03premanip = load_path('../Data/JPAK_03/2012_05_27-19_07/Analysis', 'jpak03premanip')101# jpak03manip1 = load_path('../Data/JPAK_03/2012_05_28-19_20/Analysis', 'jpak03manip1',True)102# jpak03manip2 = load_path('../Data/JPAK_03/2012_05_29-14_22/Analysis', 'jpak03manip2',True)103# jpak03manip3 = load_path('../Data/JPAK_03/2012_05_30-15_25/Analysis', 'jpak03manip3',True)104# jpak03manip4 = load_path('../Data/JPAK_03/2012_05_31-15_48/Analysis', 'jpak03manip4',True)105# jpak03manip5 = load_path('../Data/JPAK_03/2012_06_19-10_32/Analysis', 'jpak03manip5',True)106# jpak03stable1 = load_path('../Data/JPAK_03/2012_06_20-10_58/Analysis', 'jpak03stable1',False)107# jpak03forwardrot1 = load_path('../Data/JPAK_03/2012_06_21-10_46/Analysis', 'jpak03forwardrot1',True)108# jpak03forwardrot2 = load_path('../Data/JPAK_03/2012_06_22-12_55/Analysis', 'jpak03forwardrot2',True)109# jpak03stable2 = load_path('../Data/JPAK_03/2012_07_03-11_41/Analysis', 'jpak03stable2',False)110# jpak03learning = process_session.merge_sessions('jpak03learning',[jpak03learning1,jpak03learning2,jpak03learning3,jpak03learning4])111# jpak03control = process_session.merge_sessions('jpak03control',[jpak03control1,jpak03control2,jpak03control3,jpak03control4])112# jpak03manipA = process_session.merge_sessions('jpak03manip1',[jpak03premanip, jpak03manip1,jpak03manip2,jpak03manip3,jpak03manip4])113# jpak03manipB = process_session.merge_sessions('jpak03manip2',[jpak03forwardrot1,jpak03forwardrot2])114# 115# 
jpak04 = False116# if jpak04:117# #jpak04habituation = load_path('../Data/JPAK_04/2012_04_24-13_44/Analysis', 'jpak04habituation',False,False)118# jpak04learning1 = load_path('../Data/JPAK_04/2012_04_25-15_21/Analysis', 'jpak04learning1',False,False)119# jpak04learning2 = load_path('../Data/JPAK_04/2012_04_26-16_19/Analysis', 'jpak04learning2',False,False)120# jpak04learning3 = load_path('../Data/JPAK_04/2012_04_28-17_40/Analysis', 'jpak04learning3',False,False)121# jpak04learning4 = load_path('../Data/JPAK_04/2012_04_29-17_08/Analysis', 'jpak04learning4',False,False)122# jpak04control1 = load_path('../Data/JPAK_04/2012_05_21-19_13/Analysis', 'jpak04control1')123# jpak04control2 = load_path('../Data/JPAK_04/2012_05_22-21_19/Analysis', 'jpak04control2')124# jpak04control3 = load_path('../Data/JPAK_04/2012_05_23-20_21/Analysis', 'jpak04control3')125# jpak04control4 = load_path('../Data/JPAK_04/2012_05_25-00_40/Analysis', 'jpak04control4')126# jpak04premanip = load_path('../Data/JPAK_04/2012_05_27-19_39/Analysis', 'jpak04premanip')127# jpak04manip1 = load_path('../Data/JPAK_04/2012_05_28-19_59/Analysis', 'jpak04manip1',True)128# jpak04manip2 = load_path('../Data/JPAK_04/2012_05_29-15_01/Analysis', 'jpak04manip2',True)129# jpak04manip3 = load_path('../Data/JPAK_04/2012_05_30-16_09/Analysis', 'jpak04manip3',True)130# jpak04manip4 = load_path('../Data/JPAK_04/2012_05_31-16_25/Analysis', 'jpak04manip4',True)131# jpak04stable1 = load_path('../Data/JPAK_04/2012_06_19-11_09/Analysis', 'jpak04stable1',False)132# jpak04stable2 = load_path('../Data/JPAK_04/2012_06_20-11_34/Analysis', 'jpak04stable2',False)133# jpak04forwardrot1 = load_path('../Data/JPAK_04/2012_06_21-11_26/Analysis', 'jpak04forwardrot1',True)134# jpak04forwardrot2 = load_path('../Data/JPAK_04/2012_06_22-13_31/Analysis', 'jpak04forwardrot2',True)135# jpak04stable3 = load_path('../Data/JPAK_04/2012_07_03-12_21/Analysis', 'jpak04stable3',False)136# jpak04learning = 
process_session.merge_sessions('jpak04learning',[jpak04learning1,jpak04learning2,jpak04learning3,jpak04learning4])137# jpak04control = process_session.merge_sessions('jpak04control',[jpak04control1,jpak04control2,jpak04control3,jpak04control4])138# jpak04manipA = process_session.merge_sessions('jpak04manip1',[jpak04premanip, jpak04manip1,jpak04manip2,jpak04manip3,jpak04manip4])139# jpak04manipB = process_session.merge_sessions('jpak04manip2',[jpak04forwardrot1,jpak04forwardrot2])140# 141# jpak05 = False142# if jpak05:143# #jpak05habituation = load_path('../Data/JPAK_05/2012_04_24-13_58/Analysis', 'jpak05habituation',False,False)144# jpak05learning1 = load_path('../Data/JPAK_05/2012_04_25-15_55/Analysis', 'jpak05learning1',False,False)145# jpak05learning2 = load_path('../Data/JPAK_05/2012_04_26-16_54/Analysis', 'jpak05learning2',False,False)146# jpak05learning3 = load_path('../Data/JPAK_05/2012_04_28-18_12/Analysis', 'jpak05learning3',False,False)147# jpak05learning4 = load_path('../Data/JPAK_05/2012_04_29-16_17/Analysis', 'jpak05learning4',False,False)148# jpak05control1 = load_path('../Data/JPAK_05/2012_05_21-19_48/Analysis', 'jpak05control1')149# jpak05control2 = load_path('../Data/JPAK_05/2012_05_22-21_50/Analysis', 'jpak05control2')150# jpak05control3 = load_path('../Data/JPAK_05/2012_05_23-21_00/Analysis', 'jpak05control3')151# jpak05control4 = load_path('../Data/JPAK_05/2012_05_25-01_15/Analysis', 'jpak05control4')152# jpak05premanip = load_path('../Data/JPAK_05/2012_05_27-20_15/Analysis', 'jpak05premanip')153# jpak05manip1 = load_path('../Data/JPAK_05/2012_05_28-20_31/Analysis', 'jpak05manip1',True)154# jpak05manip2 = load_path('../Data/JPAK_05/2012_05_29-15_35/Analysis', 'jpak05manip2',True)155# jpak05manip3 = load_path('../Data/JPAK_05/2012_05_30-16_43/Analysis', 'jpak05manip3',True)156# jpak05manip4 = load_path('../Data/JPAK_05/2012_05_31-17_11/Analysis', 'jpak05manip4',True)157# jpak05stable1 = load_path('../Data/JPAK_05/2012_06_19-11_50/Analysis', 
'jpak05stable1',False)158# jpak05stable2 = load_path('../Data/JPAK_05/2012_06_20-12_11/Analysis', 'jpak05stable2',False)159# jpak05forwardrot1 = load_path('../Data/JPAK_05/2012_06_21-12_04/Analysis', 'jpak05forwardrot1',True)160# jpak05forwardrot2 = load_path('../Data/JPAK_05/2012_06_22-14_06/Analysis', 'jpak05forwardrot2',True)161# jpak05stable3 = load_path('../Data/JPAK_05/2012_07_03-13_27/Analysis', 'jpak05stable3',False)162# jpak05learning = process_session.merge_sessions('jpak05learning',[jpak05learning1,jpak05learning2,jpak05learning3,jpak05learning4])163# jpak05control = process_session.merge_sessions('jpak05control',[jpak05control1,jpak05control2,jpak05control3,jpak05control4])164# jpak05manipA = process_session.merge_sessions('jpak05manip1',[jpak05premanip, jpak05manip1,jpak05manip2,jpak05manip3,jpak05manip4])165#==============================================================================
...
model.py
Source:model.py
import pathlib

import tensorflow as tf

from .decom import DecomNet
from .dehaze import DehazeNet
from .enhance import EnhanceNet


def decom_loss():
    '''
    Decom Loss function adapted from DeepRetinex

    Returns a Keras-compatible loss function that expects the concatenated
    tensor produced by the DecomCombine layer (14 channels:
    y_decom | x_decom | img_low | img_high).
    '''
    def concat(layers):
        return tf.concat(layers, axis=-1)

    def gradient(input_tensor, direction):
        smooth_kernel_x = tf.reshape(tf.constant([[0, 0], [-1, 1]], tf.float32), [2, 2, 1, 1])
        smooth_kernel_y = tf.transpose(smooth_kernel_x, [1, 0, 2, 3])
        if direction == "x":
            kernel = smooth_kernel_x
        elif direction == "y":
            kernel = smooth_kernel_y
        else:
            # Previously fell through to an UnboundLocalError on `kernel`.
            raise ValueError("direction must be 'x' or 'y'")
        return tf.abs(tf.nn.conv2d(input_tensor, kernel, strides=[1, 1, 1, 1], padding='SAME'))

    def ave_gradient(input_tensor, direction):
        return tf.nn.avg_pool2d(gradient(input_tensor, direction), ksize=3, strides=1, padding='SAME')

    def smooth(input_I, input_R):
        # Illumination smoothness weighted by reflectance gradients.
        input_R = tf.image.rgb_to_grayscale(input_R)
        return tf.reduce_mean(
            gradient(input_I, "x") * tf.exp(-10 * ave_gradient(input_R, "x"))
            + gradient(input_I, "y") * tf.exp(-10 * ave_gradient(input_R, "y"))
        )

    def loss_fn(_y_true, _y_pred):
        # All inputs are packed into _y_pred by the DecomCombine layer.
        y_true = _y_pred[:, :, :, :4]
        y_pred = _y_pred[:, :, :, 4:8]
        img_low = _y_pred[:, :, :, 8:11]
        img_high = _y_pred[:, :, :, 11:14]
        I_low = y_pred[:, :, :, 3:4]
        I_high = y_true[:, :, :, 3:4]
        R_low = y_pred[:, :, :, 0:3]
        R_high = y_true[:, :, :, 0:3]
        I_low_3 = concat([I_low, I_low, I_low])
        I_high_3 = concat([I_high, I_high, I_high])

        # loss
        recon_loss_low = tf.math.reduce_mean(tf.math.abs(R_low * I_low_3 - img_low))
        recon_loss_high = tf.math.reduce_mean(tf.math.abs(R_high * I_high_3 - img_high))
        recon_loss_mutal_low = tf.math.reduce_mean(tf.math.abs(R_high * I_low_3 - img_low))
        recon_loss_mutal_high = tf.math.reduce_mean(tf.math.abs(R_low * I_high_3 - img_high))
        equal_R_loss = tf.math.reduce_mean(tf.math.abs(R_low - R_high))
        Ismooth_loss_low = smooth(I_low, R_low)
        Ismooth_loss_high = smooth(I_high, R_high)
        loss_Decom = (
            recon_loss_low + recon_loss_high
            + 0.001 * recon_loss_mutal_low + 0.001 * recon_loss_mutal_high
            + 0.1 * Ismooth_loss_low + 0.1 * Ismooth_loss_high
            + 0.0001 * equal_R_loss  # 0.01
        )
        return loss_Decom
    return loss_fn


def recon_loss(model_type='vgg', input_shape=(256, 256, 3), layers=None, weights=(8, 4, 2, 1)):
    '''
    Recon Loss function adapted from Perceptual Loss

    model_type: 'vgg' or 'xception' backbone for feature extraction.
    layers: indices of backbone layers to compare; sensible defaults per backbone.
    weights: pixel-loss weight followed by one weight per feature layer.
    '''
    # Default was a mutable list; an (equivalent, indexable) tuple is safer.
    def make_perceptual_loss_model(model_type=model_type, input_shape=input_shape, layers=layers, weights=weights):
        if model_type == 'vgg':
            from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input
            base_model = VGG16(include_top=False, weights='imagenet', input_shape=input_shape)
            base_preprocess = tf.keras.applications.vgg16.preprocess_input
            if layers is None:
                layers = [2, 5, 9]
        elif model_type == 'xception':
            from tensorflow.keras.applications.xception import Xception, preprocess_input
            base_model = Xception(include_top=False, weights='imagenet', input_shape=input_shape)
            base_preprocess = tf.keras.applications.xception.preprocess_input
            if layers is None:
                layers = [19, 42, 62]
        else:
            raise NotImplementedError('Perceptual Model using \'%s\' is not implemented!' % model_type)
        # Set up loss model
        outputs = [base_model.layers[idx].output for idx in layers]
        loss_model = tf.keras.Model(inputs=base_model.input, outputs=outputs)
        loss_model.trainable = False
        return loss_model, base_preprocess

    def perceptual_loss(y_true, y_pred):
        # extract y true and predicted features
        y_true_features = loss_model(base_preprocess(y_true * 255.))
        y_pred_features = loss_model(base_preprocess(y_pred * 255.))
        # calculate weighted loss: pixel MSE plus per-layer feature MSEs
        loss = weights[0] * tf.math.reduce_mean(tf.math.square(y_true - y_pred))
        for idx in range(0, len(weights) - 1):
            loss += weights[idx + 1] * tf.math.reduce_mean(tf.math.square(y_true_features[idx] - y_pred_features[idx]))
        loss = loss / sum(weights)

        return loss

    loss_model, base_preprocess = make_perceptual_loss_model()
    return perceptual_loss


def _load_or_build_nets(load_path, input_size):
    """Return (decom, dehaze, enhance) sub-networks.

    Built fresh when load_path is None, otherwise loaded from the *.h5
    files inside load_path (a pathlib.Path).
    """
    if load_path is None:
        decom = DecomNet(input_size=input_size)
        dehaze = DehazeNet(input_size=input_size)
        enhance = EnhanceNet(input_size=input_size[:-1] + (4,))
    else:
        decom = tf.keras.models.load_model(load_path / 'decom.h5', compile=False)
        dehaze = tf.keras.models.load_model(load_path / 'dehaze.h5', compile=False)
        enhance = tf.keras.models.load_model(load_path / 'enhance.h5', compile=False)
    return decom, dehaze, enhance


def _recon_mul(dcpdn_out, enh_net_out):
    """Reconstruct the final image: dehazed reflectance x 3-channel illumination."""
    enh_net_out_3 = tf.concat([enh_net_out, enh_net_out, enh_net_out], axis=-1)
    return dcpdn_out * enh_net_out_3


def build_train_model(input_size=(256, 256, 3), load_path=None):
    '''
    Builds and returns uncompiled Traing Phase model comprising of:
    * decom
    * dehaze
    * enhance
    '''
    if load_path is not None and not isinstance(load_path, pathlib.Path):
        load_path = pathlib.Path(load_path)

    # x = Hazed, Low-light input image
    # y = Dehazed, Illuminated ground truth image
    x = tf.keras.layers.Input(input_size)
    y = tf.keras.layers.Input(input_size)

    decomNet, dehazeNet, enhanceNet = _load_or_build_nets(load_path, input_size)

    # Decomposition
    # z_R = z[:,:,:,:3]
    # z_I = z[:,:,:,3:4]
    x_decom = decomNet(x)
    y_decom = decomNet(y)
    decomCombine = tf.keras.layers.Lambda(
        lambda z: tf.concat([z[0], z[1], z[2], z[3]], axis=-1),
        name='DecomCombine',
    )((y_decom, x_decom, x, y))

    x_R_dehazed = dehazeNet(x_decom[:, :, :, :3])
    x_I_illum = enhanceNet(x_decom)

    y_hat = tf.keras.layers.Lambda(lambda t: _recon_mul(t[0], t[1]), name='ReconFinal')((x_R_dehazed, x_I_illum))
    combined_model = tf.keras.Model(inputs=[x, y], outputs=[y_hat, decomCombine], name='FinalModel')
    return combined_model


def build_vis_model(input_size=(256, 256, 3), load_path=None):
    '''
    Builds and returns model for inference
    '''
    if load_path is None:
        print('\nLoad Path not specified for Visualization model!!! The returned model has random weights!\n')
    elif not isinstance(load_path, pathlib.Path):
        # Previously pathlib.Path(None) was attempted when load_path was None.
        load_path = pathlib.Path(load_path)

    # Model building starts here
    x = tf.keras.layers.Input(input_size)

    # Previously `DecomNet = DecomNet(...)` shadowed the imported class as a
    # local and raised UnboundLocalError; distinct local names fix that.
    decomNet, dehazeNet, enhanceNet = _load_or_build_nets(load_path, input_size)

    x_decom = decomNet(x)
    x_R_dehazed = dehazeNet(x_decom[:, :, :, :3])
    x_I_illum = enhanceNet(x_decom)

    y_hat = tf.keras.layers.Lambda(lambda t: _recon_mul(t[0], t[1]), name='ReconFinal')((x_R_dehazed, x_I_illum))
    model = tf.keras.Model(
        inputs=x,
        outputs=[y_hat, x_decom[:, :, :, :3], x_decom[:, :, :, 3], x_R_dehazed, x_I_illum[:, :, :, 0]],
        name='FinalModel',
    )
    # Model building ends here
    return model


def build_inference_model(input_size=(256, 256, 3), load_path=None):
    '''
    Builds and returns model for inference
    '''
    if load_path is None:
        print('\nLoad Path not specified for Inference model!!! The returned model has random weights!\n')
    elif not isinstance(load_path, pathlib.Path):
        load_path = pathlib.Path(load_path)

    # Model building starts here
    x = tf.keras.layers.Input(input_size)

    decomNet, dehazeNet, enhanceNet = _load_or_build_nets(load_path, input_size)

    x_decom = decomNet(x)
    x_R_dehazed = dehazeNet(x_decom[:, :, :, :3])
    x_I_illum = enhanceNet(x_decom)

    y_hat = tf.keras.layers.Lambda(lambda t: _recon_mul(t[0], t[1]), name='ReconFinal')((x_R_dehazed, x_I_illum))
    model = tf.keras.Model(inputs=x, outputs=y_hat, name='FinalModel')
    # Model building ends here
    # NOTE(review): the source was truncated after the comment above; the
    # return is restored by symmetry with build_vis_model — confirm upstream.
    return model
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, TestNG, etc.
You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.
Get 100 automation test minutes FREE!