Best JavaScript code snippet using devicefarmer-stf
tracker_file.py
Source:tracker_file.py
# -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for tracker file functionality."""

# NOTE(review): this module uses Python 2 idioms (the `unicode` builtin and
# the octal literal 0600) and is not Python 3 compatible as written.

import errno
import hashlib
import json
import os
import re

from boto import config
from gslib.exception import CommandException
from gslib.util import CreateDirIfNeeded
from gslib.util import GetGsutilStateDir
from gslib.util import ResumableThreshold
from gslib.util import UTF8

# The maximum length of a file name can vary wildly between different
# operating systems, so we always ensure that tracker files are less
# than 100 characters in order to avoid any such issues.
MAX_TRACKER_FILE_NAME_LENGTH = 100

TRACKER_FILE_UNWRITABLE_EXCEPTION_TEXT = (
    'Couldn\'t write tracker file (%s): %s. This can happen if gsutil is '
    'configured to save tracker files to an unwritable directory)')

# Format for upload tracker files.
ENCRYPTION_UPLOAD_TRACKER_ENTRY = 'encryption_key_sha256'
SERIALIZATION_UPLOAD_TRACKER_ENTRY = 'serialization_data'


class TrackerFileType(object):
  """Enum-like namespace of tracker file type strings.

  The string values are embedded in tracker file names (see
  _HashAndReturnPath), so they must remain stable across releases.
  """
  UPLOAD = 'upload'
  DOWNLOAD = 'download'
  DOWNLOAD_COMPONENT = 'download_component'
  PARALLEL_UPLOAD = 'parallel_upload'
  SLICED_DOWNLOAD = 'sliced_download'
  REWRITE = 'rewrite'


def _HashFilename(filename):
  """Apply a hash function (SHA1) to shorten the passed file name.

  The spec for the hashed file name is as follows:
      TRACKER_<hash>_<trailing>
  where hash is a SHA1 hash on the original file name and trailing is
  the last 16 chars from the original file name. Max file name lengths
  vary by operating system so the goal of this function is to ensure
  the hashed version takes fewer than 100 characters.

  Args:
    filename: file name to be hashed.

  Returns:
    shorter, hashed version of passed file name
  """
  # Normalize to UTF-8-encoded bytes before hashing so unicode and byte
  # string inputs that name the same file hash identically.
  if isinstance(filename, unicode):
    filename = filename.encode(UTF8)
  else:
    filename = unicode(filename, UTF8).encode(UTF8)
  m = hashlib.sha1(filename)
  return 'TRACKER_' + m.hexdigest() + '.' + filename[-16:]


def CreateTrackerDirIfNeeded():
  """Looks up or creates the gsutil tracker file directory.

  This is the configured directory where gsutil keeps its resumable transfer
  tracker files. This function creates it if it doesn't already exist.

  Returns:
    The pathname to the tracker directory.
  """
  tracker_dir = config.get(
      'GSUtil', 'resumable_tracker_dir',
      os.path.join(GetGsutilStateDir(), 'tracker-files'))
  CreateDirIfNeeded(tracker_dir)
  return tracker_dir


def GetRewriteTrackerFilePath(src_bucket_name, src_obj_name, dst_bucket_name,
                              dst_obj_name, api_selector):
  """Gets the tracker file name described by the arguments.

  Args:
    src_bucket_name: Source bucket (string).
    src_obj_name: Source object (string).
    dst_bucket_name: Destination bucket (string).
    dst_obj_name: Destination object (string)
    api_selector: API to use for this operation.

  Returns:
    File path to tracker file.
  """
  # Encode the src and dest bucket and object names into the tracker file
  # name.  Slashes and backslashes are replaced so the result is a single
  # path component.
  res_tracker_file_name = (
      re.sub('[/\\\\]', '_', 'rewrite__%s__%s__%s__%s__%s.token' %
             (src_bucket_name, src_obj_name, dst_bucket_name,
              dst_obj_name, api_selector)))
  return _HashAndReturnPath(res_tracker_file_name, TrackerFileType.REWRITE)


def GetTrackerFilePath(dst_url, tracker_file_type, api_selector, src_url=None,
                       component_num=None):
  """Gets the tracker file name described by the arguments.

  Args:
    dst_url: Destination URL for tracker file.
    tracker_file_type: TrackerFileType for this operation.
    api_selector: API to use for this operation.
    src_url: Source URL for the source file name for parallel uploads.
    component_num: Component number if this is a download component, else None.

  Returns:
    File path to tracker file.
  """
  if tracker_file_type == TrackerFileType.UPLOAD:
    # Encode the dest bucket and object name into the tracker file name.
    res_tracker_file_name = (
        re.sub('[/\\\\]', '_', 'resumable_upload__%s__%s__%s.url' %
               (dst_url.bucket_name, dst_url.object_name, api_selector)))
  elif tracker_file_type == TrackerFileType.DOWNLOAD:
    # Encode the fully-qualified dest file name into the tracker file name.
    res_tracker_file_name = (
        re.sub('[/\\\\]', '_', 'resumable_download__%s__%s.etag' %
               (os.path.realpath(dst_url.object_name), api_selector)))
  elif tracker_file_type == TrackerFileType.DOWNLOAD_COMPONENT:
    # Encode the fully-qualified dest file name and the component number
    # into the tracker file name.
    res_tracker_file_name = (
        re.sub('[/\\\\]', '_', 'resumable_download__%s__%s__%d.etag' %
               (os.path.realpath(dst_url.object_name), api_selector,
                component_num)))
  elif tracker_file_type == TrackerFileType.PARALLEL_UPLOAD:
    # Encode the dest bucket and object names as well as the source file name
    # into the tracker file name.
    res_tracker_file_name = (
        re.sub('[/\\\\]', '_', 'parallel_upload__%s__%s__%s__%s.url' %
               (dst_url.bucket_name, dst_url.object_name,
                src_url, api_selector)))
  elif tracker_file_type == TrackerFileType.SLICED_DOWNLOAD:
    # Encode the fully-qualified dest file name into the tracker file name.
    res_tracker_file_name = (
        re.sub('[/\\\\]', '_', 'sliced_download__%s__%s.etag' %
               (os.path.realpath(dst_url.object_name), api_selector)))
  elif tracker_file_type == TrackerFileType.REWRITE:
    # Should use GetRewriteTrackerFilePath instead.
    raise NotImplementedError()

  return _HashAndReturnPath(res_tracker_file_name, tracker_file_type)


def DeleteDownloadTrackerFiles(dst_url, api_selector):
  """Deletes all tracker files corresponding to an object download.

  Args:
    dst_url: StorageUrl describing the destination file.
    api_selector: The Cloud API implementation used.
  """
  # Delete non-sliced download tracker file.
  DeleteTrackerFile(GetTrackerFilePath(dst_url, TrackerFileType.DOWNLOAD,
                                       api_selector))

  # Delete all sliced download tracker files.
  tracker_files = GetSlicedDownloadTrackerFilePaths(dst_url, api_selector)
  for tracker_file in tracker_files:
    DeleteTrackerFile(tracker_file)


def GetSlicedDownloadTrackerFilePaths(dst_url, api_selector,
                                      num_components=None):
  """Gets a list of sliced download tracker file paths.

  The list consists of the parent tracker file path in index 0, and then
  any existing component tracker files in [1:].

  Args:
    dst_url: Destination URL for tracker file.
    api_selector: API to use for this operation.
    num_components: The number of component tracker files, if already known.
                    If not known, the number will be retrieved from the parent
                    tracker file on disk.

  Returns:
    File path to tracker file.
  """
  parallel_tracker_file_path = GetTrackerFilePath(
      dst_url, TrackerFileType.SLICED_DOWNLOAD, api_selector)
  tracker_file_paths = [parallel_tracker_file_path]

  # If we don't know the number of components, check the tracker file.
  if num_components is None:
    tracker_file = None
    try:
      tracker_file = open(parallel_tracker_file_path, 'r')
      num_components = json.load(tracker_file)['num_components']
    except (IOError, ValueError):
      # Missing or malformed parent tracker file: only the parent path is
      # returned, since component counts can't be determined.
      return tracker_file_paths
    finally:
      if tracker_file:
        tracker_file.close()

  for i in range(num_components):
    tracker_file_paths.append(GetTrackerFilePath(
        dst_url, TrackerFileType.DOWNLOAD_COMPONENT, api_selector,
        component_num=i))

  return tracker_file_paths


def _HashAndReturnPath(res_tracker_file_name, tracker_file_type):
  """Hashes and returns a tracker file path.

  Args:
    res_tracker_file_name: The tracker file name prior to it being hashed.
    tracker_file_type: The TrackerFileType of res_tracker_file_name.

  Returns:
    Final (hashed) tracker file path.
  """
  resumable_tracker_dir = CreateTrackerDirIfNeeded()
  hashed_tracker_file_name = _HashFilename(res_tracker_file_name)
  tracker_file_name = '%s_%s' % (str(tracker_file_type).lower(),
                                 hashed_tracker_file_name)
  tracker_file_path = '%s%s%s' % (resumable_tracker_dir, os.sep,
                                  tracker_file_name)
  # _HashFilename guarantees a short name; this assert guards the invariant.
  assert len(tracker_file_name) < MAX_TRACKER_FILE_NAME_LENGTH
  return tracker_file_path


def DeleteTrackerFile(tracker_file_name):
  """Deletes the tracker file at the given path, if it exists.

  A None/empty name is a no-op, so callers can pass through an unused
  tracker file name without checking.
  """
  if tracker_file_name and os.path.exists(tracker_file_name):
    os.unlink(tracker_file_name)


def HashRewriteParameters(
    src_obj_metadata, dst_obj_metadata, projection, src_generation=None,
    gen_match=None, meta_gen_match=None, canned_acl=None,
    max_bytes_per_call=None, src_dec_key_sha256=None, dst_enc_key_sha256=None,
    fields=None):
  """Creates an MD5 hex digest of the parameters for a rewrite call.

  Resuming rewrites requires that the input parameters are identical. Thus,
  the rewrite tracker file needs to represent the input parameters. For
  easy comparison, hash the input values. If a user does a performs a
  same-source/same-destination rewrite via a different command (for example,
  with a changed ACL), the hashes will not match and we will restart the
  rewrite from the beginning.

  Args:
    src_obj_metadata: apitools Object describing source object. Must include
        bucket, name, and etag.
    dst_obj_metadata: apitools Object describing destination object. Must
        include bucket and object name
    projection: Projection used for the API call.
    src_generation: Optional source generation.
    gen_match: Optional generation precondition.
    meta_gen_match: Optional metageneration precondition.
    canned_acl: Optional canned ACL string.
    max_bytes_per_call: Optional maximum bytes rewritten per call.
    src_dec_key_sha256: Optional SHA256 hash string of decryption key for
        source object.
    dst_enc_key_sha256: Optional SHA256 hash string of encryption key for
        destination object.
    fields: Optional fields to include in response to call.

  Returns:
    MD5 hex digest Hash of the input parameters, or None if required parameters
    are missing.
  """
  if (not src_obj_metadata or
      not src_obj_metadata.bucket or
      not src_obj_metadata.name or
      not src_obj_metadata.etag or
      not dst_obj_metadata or
      not dst_obj_metadata.bucket or
      not dst_obj_metadata.name or
      not projection):
    # Required parameters missing; returns None implicitly.
    return
  md5_hash = hashlib.md5()
  for input_param in (
      src_obj_metadata, dst_obj_metadata, projection, src_generation,
      gen_match, meta_gen_match, canned_acl, fields, max_bytes_per_call,
      src_dec_key_sha256, dst_enc_key_sha256):
    # Tracker file matching changed between gsutil 4.15 -> 4.16 and will cause
    # rewrites to start over from the beginning on a gsutil version upgrade.
    if input_param is not None:
      md5_hash.update(str(input_param))
  return md5_hash.hexdigest()


def ReadRewriteTrackerFile(tracker_file_name, rewrite_params_hash):
  """Attempts to read a rewrite tracker file.

  Args:
    tracker_file_name: Tracker file path string.
    rewrite_params_hash: MD5 hex digest of rewrite call parameters constructed
        by HashRewriteParameters.

  Returns:
    String rewrite_token for resuming rewrite requests if a matching tracker
    file exists, None otherwise (which will result in starting a new rewrite).
  """
  # Check to see if we already have a matching tracker file.
  tracker_file = None
  if not rewrite_params_hash:
    return
  try:
    tracker_file = open(tracker_file_name, 'r')
    existing_hash = tracker_file.readline().rstrip('\n')
    if existing_hash == rewrite_params_hash:
      # Next line is the rewrite token.
      return tracker_file.readline().rstrip('\n')
  except IOError as e:
    # Ignore non-existent file (happens first time a rewrite is attempted).
    if e.errno != errno.ENOENT:
      print('Couldn\'t read Copy tracker file (%s): %s. Restarting copy '
            'from scratch.' %
            (tracker_file_name, e.strerror))
  finally:
    if tracker_file:
      tracker_file.close()


def WriteRewriteTrackerFile(tracker_file_name, rewrite_params_hash,
                            rewrite_token):
  """Writes a rewrite tracker file.

  Args:
    tracker_file_name: Tracker file path string.
    rewrite_params_hash: MD5 hex digest of rewrite call parameters constructed
        by HashRewriteParameters.
    rewrite_token: Rewrite token string returned by the service.
  """
  # Format matches ReadRewriteTrackerFile: hash on line 1, token on line 2.
  _WriteTrackerFile(tracker_file_name, '%s\n%s\n' % (rewrite_params_hash,
                                                     rewrite_token))


def ReadOrCreateDownloadTrackerFile(src_obj_metadata, dst_url, logger,
                                    api_selector, start_byte,
                                    existing_file_size, component_num=None):
  """Checks for a download tracker file and creates one if it does not exist.

  The methodology for determining the download start point differs between
  normal and sliced downloads. For normal downloads, the existing bytes in
  the file are presumed to be correct and have been previously downloaded from
  the server (if a tracker file exists). In this case, the existing file size
  is used to determine the download start point. For sliced downloads, the
  number of bytes previously retrieved from the server cannot be determined
  from the existing file size, and so the number of bytes known to have been
  previously downloaded is retrieved from the tracker file.

  Args:
    src_obj_metadata: Metadata for the source object. Must include etag and
                      generation.
    dst_url: Destination URL for tracker file.
    logger: For outputting log messages.
    api_selector: API to use for this operation.
    start_byte: The start byte of the byte range for this download.
    existing_file_size: Size of existing file for this download on disk.
    component_num: The component number, if this is a component of a parallel
                   download, else None.

  Returns:
    tracker_file_name: The name of the tracker file, if one was used.
    download_start_byte: The first byte that still needs to be downloaded.
  """
  assert src_obj_metadata.etag
  tracker_file_name = None
  if src_obj_metadata.size < ResumableThreshold():
    # Don't create a tracker file for a small downloads; cross-process resumes
    # won't work, but restarting a small download is inexpensive.
    return tracker_file_name, start_byte

  download_name = dst_url.object_name
  if component_num is None:
    tracker_file_type = TrackerFileType.DOWNLOAD
  else:
    tracker_file_type = TrackerFileType.DOWNLOAD_COMPONENT
    download_name += ' component %d' % component_num

  tracker_file_name = GetTrackerFilePath(dst_url, tracker_file_type,
                                         api_selector,
                                         component_num=component_num)
  tracker_file = None
  # Check to see if we already have a matching tracker file.
  try:
    tracker_file = open(tracker_file_name, 'r')
    if tracker_file_type is TrackerFileType.DOWNLOAD:
      # DOWNLOAD tracker format: single line containing the etag.
      etag_value = tracker_file.readline().rstrip('\n')
      if etag_value == src_obj_metadata.etag:
        return tracker_file_name, existing_file_size
    elif tracker_file_type is TrackerFileType.DOWNLOAD_COMPONENT:
      # DOWNLOAD_COMPONENT tracker format: JSON (see
      # WriteDownloadComponentTrackerFile).
      component_data = json.loads(tracker_file.read())
      if (component_data['etag'] == src_obj_metadata.etag and
          component_data['generation'] == src_obj_metadata.generation):
        return tracker_file_name, component_data['download_start_byte']

    logger.warn('Tracker file doesn\'t match for download of %s. Restarting '
                'download from scratch.' % download_name)

  except (IOError, ValueError) as e:
    # Ignore non-existent file (happens first time a download
    # is attempted on an object), but warn user for other errors.
    if isinstance(e, ValueError) or e.errno != errno.ENOENT:
      logger.warn('Couldn\'t read download tracker file (%s): %s. Restarting '
                  'download from scratch.' % (tracker_file_name, str(e)))
  finally:
    if tracker_file:
      tracker_file.close()

  # There wasn't a matching tracker file, so create one and then start the
  # download from scratch.
  if tracker_file_type is TrackerFileType.DOWNLOAD:
    _WriteTrackerFile(tracker_file_name, '%s\n' % src_obj_metadata.etag)
  elif tracker_file_type is TrackerFileType.DOWNLOAD_COMPONENT:
    WriteDownloadComponentTrackerFile(tracker_file_name, src_obj_metadata,
                                      start_byte)
  return tracker_file_name, start_byte


def GetDownloadStartByte(src_obj_metadata, dst_url, api_selector,
                         start_byte, existing_file_size, component_num=None):
  """Returns the download starting point.

  The methodology of this function is the same as in
  ReadOrCreateDownloadTrackerFile, with the difference that we are not
  interested here in possibly creating a tracker file. In case there is no
  tracker file, this means the download starting point is start_byte.

  Args:
    src_obj_metadata: Metadata for the source object. Must include etag and
                      generation.
    dst_url: Destination URL for tracker file.
    api_selector: API to use for this operation.
    start_byte: The start byte of the byte range for this download.
    existing_file_size: Size of existing file for this download on disk.
    component_num: The component number, if this is a component of a parallel
                   download, else None.

  Returns:
    download_start_byte: The first byte that still needs to be downloaded.
  """
  assert src_obj_metadata.etag
  tracker_file_name = None
  if src_obj_metadata.size < ResumableThreshold():
    # There is no tracker file for small downloads; this means we start from
    # scratch.
    return start_byte

  if component_num is None:
    tracker_file_type = TrackerFileType.DOWNLOAD
  else:
    tracker_file_type = TrackerFileType.DOWNLOAD_COMPONENT

  tracker_file_name = GetTrackerFilePath(dst_url, tracker_file_type,
                                         api_selector,
                                         component_num=component_num)
  tracker_file = None
  # Check to see if we already have a matching tracker file.
  try:
    tracker_file = open(tracker_file_name, 'r')
    if tracker_file_type is TrackerFileType.DOWNLOAD:
      etag_value = tracker_file.readline().rstrip('\n')
      if etag_value == src_obj_metadata.etag:
        return existing_file_size
    elif tracker_file_type is TrackerFileType.DOWNLOAD_COMPONENT:
      component_data = json.loads(tracker_file.read())
      if (component_data['etag'] == src_obj_metadata.etag and
          component_data['generation'] == src_obj_metadata.generation):
        return component_data['download_start_byte']
  except (IOError, ValueError):
    # If the file does not exist, there is not much we can do at this point.
    pass
  finally:
    if tracker_file:
      tracker_file.close()

  # There wasn't a matching tracker file, which means our starting point is
  # start_byte.
  return start_byte


def WriteDownloadComponentTrackerFile(tracker_file_name, src_obj_metadata,
                                      current_file_pos):
  """Updates or creates a download component tracker file on disk.

  Args:
    tracker_file_name: The name of the tracker file.
    src_obj_metadata: Metadata for the source object. Must include etag.
    current_file_pos: The current position in the file.
  """
  component_data = {'etag': src_obj_metadata.etag,
                    'generation': src_obj_metadata.generation,
                    'download_start_byte': current_file_pos}

  _WriteTrackerFile(tracker_file_name, json.dumps(component_data))


def _WriteTrackerFile(tracker_file_name, data):
  """Creates a tracker file, storing the input data."""
  try:
    # Open with owner-only permissions (0600): tracker files can contain
    # resumable-transfer state that shouldn't be world-readable.
    with os.fdopen(os.open(tracker_file_name,
                           os.O_WRONLY | os.O_CREAT, 0600), 'w') as tf:
      tf.write(data)
    # NOTE(review): returns False even on success; callers appear to ignore
    # the return value — confirm before relying on it.
    return False
  except (IOError, OSError) as e:
    # RaiseUnwritableTrackerFileException itself raises, so the outer
    # `raise` here never actually executes on its return value.
    raise RaiseUnwritableTrackerFileException(tracker_file_name, e.strerror)


def GetUploadTrackerData(tracker_file_name, logger,
                         encryption_key_sha256=None):
  """Reads tracker data from an upload tracker file if it exists.

  Deletes the tracker file if it uses an old format or the desired
  encryption key has changed.

  Args:
    tracker_file_name: Tracker file name for this upload.
    logger: logging.Logger for outputting log messages.
    encryption_key_sha256: Encryption key SHA256 for use in this upload, if any.

  Returns:
    Serialization data if the tracker file already exists (resume existing
    upload), None otherwise.
  """
  tracker_file = None
  remove_tracker_file = False
  encryption_restart = False

  # If we already have a matching tracker file, get the serialization data
  # so that we can resume the upload.
  try:
    tracker_file = open(tracker_file_name, 'r')
    tracker_data = tracker_file.read()
    tracker_json = json.loads(tracker_data)
    if tracker_json[ENCRYPTION_UPLOAD_TRACKER_ENTRY] != encryption_key_sha256:
      encryption_restart = True
      remove_tracker_file = True
    else:
      return tracker_json[SERIALIZATION_UPLOAD_TRACKER_ENTRY]
  except IOError as e:
    # Ignore non-existent file (happens first time a upload is attempted on an
    # object, or when re-starting an upload after a
    # ResumableUploadStartOverException), but warn user for other errors.
    if e.errno != errno.ENOENT:
      logger.warn('Couldn\'t read upload tracker file (%s): %s. Restarting '
                  'upload from scratch.', tracker_file_name, e.strerror)
  except (KeyError, ValueError) as e:
    # Old tracker files used a non-JSON format; rewrite it and assume no
    # encryption key.
    remove_tracker_file = True
    if encryption_key_sha256 is not None:
      encryption_restart = True
    else:
      # If encryption key is still None, we can resume using the old format.
      return tracker_data
  finally:
    if tracker_file:
      tracker_file.close()
  if encryption_restart:
    logger.warn('Upload tracker file (%s) does not match current encryption '
                'key. Restarting upload from scratch with a new tracker '
                'file that uses the current encryption key.',
                tracker_file_name)
  if remove_tracker_file:
    DeleteTrackerFile(tracker_file_name)


def RaiseUnwritableTrackerFileException(tracker_file_name, error_str):
  """Raises an exception when unable to write the tracker file."""
  # NOTE(review): the source text was truncated here; the argument tuple is
  # completed from the two-%s format of
  # TRACKER_FILE_UNWRITABLE_EXCEPTION_TEXT — confirm against upstream.
  raise CommandException(TRACKER_FILE_UNWRITABLE_EXCEPTION_TEXT %
                         (tracker_file_name, error_str))
multiTracker.py
Source:multiTracker.py
...71 72 print('Selected bounding boxes {}'.format(bboxes))73 ## Initialize MultiTracker74 # There are two ways you can initialize multitracker75 # 1. tracker = cv2.MultiTracker("CSRT")76 # All the trackers added to this multitracker77 # will use CSRT algorithm as default78 # 2. tracker = cv2.MultiTracker()79 # No default algorithm specified80 # Initialize MultiTracker with tracking algo81 # Specify tracker type82 83 # Create MultiTracker object84 multiTracker = cv2.MultiTracker_create()85 # Initialize MultiTracker 86 for bbox in bboxes:87 multiTracker.add(createTrackerByName(trackerType), frame, bbox)88 # Process video and track objects89 while cap.isOpened():90 success, frame = cap.read()91 if not success:92 break...
tracker-group.js
Source:tracker-group.js
...65TrackerGroup.prototype.newGroup = function (name, weight) {66 return this.addUnit(new TrackerGroup(name), weight)67}68TrackerGroup.prototype.newItem = function (name, todo, weight) {69 return this.addUnit(new Tracker(name, todo), weight)70}71TrackerGroup.prototype.newStream = function (name, todo, weight) {72 return this.addUnit(new TrackerStream(name, todo), weight)73}74TrackerGroup.prototype.finish = function () {75 this.finished = true76 if (!this.trackers.length) this.addUnit(new Tracker(), 1, true)77 for (var ii = 0; ii < this.trackers.length; ii++) {78 var tracker = this.trackers[ii]79 tracker.finish()80 tracker.removeListener('change', this.bubbleChange)81 }82 this.emit('change', this.name, 1, this)83}84var buffer = ' '85TrackerGroup.prototype.debug = function (depth) {86 depth = depth || 087 var indent = depth ? buffer.substr(0, depth) : ''88 var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n'89 this.trackers.forEach(function (tracker) {90 if (tracker instanceof TrackerGroup) {...
stats-tracker-spec.js
Source:stats-tracker-spec.js
...3import {today} from '../lib/time-formatter'4describe('Stats tracker', function () {5 let tracker6 beforeEach(function () {7 tracker = new StatsTracker()8 })9 describe('StatsTracker::constructor', function () {10 it('should set default values', function () {11 expect(tracker.history).toEqual({completions: 0, linesAdded: 0, linesDeleted: 0})12 expect(tracker.today).toEqual({day: today(), completions: 0, linesAdded: 0, linesDeleted: 0})13 expect(tracker.session).toEqual({linesAdded: 0, linesDeleted: 0})14 })15 it('should inherits previous stats if provided', function () {16 const stats = {17 history: {completions: 2, linesAdded: 10, linesDeleted: 10},18 today: {day: today(), completions: 1, linesAdded: 5, linesDeleted: 5}19 }20 tracker = new StatsTracker(stats)21 expect(tracker.history).toEqual(stats.history)22 expect(tracker.today).toEqual(stats.today)23 // Ignore today's completions if 'today' has passed24 stats.today.day = '20140909'25 tracker = new StatsTracker(stats)26 expect(tracker.history).toEqual(stats.history)27 expect(tracker.today).toEqual({day: today(), completions: 0, linesAdded: 0, linesDeleted: 0})28 })29 })30 describe('StatsTracker::addCompletion', function () {31 it('should add to today and history', function () {32 tracker.addCompletion()33 expect(tracker.today.completions).toEqual(1)34 expect(tracker.history.completions).toEqual(1)35 })36 })37 describe('StatsTracker::addLines', function () {38 it('should add to session, today and history', function () {39 tracker.addLines(100)...
Using AI Code Generation
// Example: watch device presence via the devicefarmer-stf-client Tracker API.
var stf = require('devicefarmer-stf-client');
// NOTE(review): the Tracker is constructed with an empty options object here;
// the snippet does not show what configuration (e.g. server URL) is expected.
var tracker = new stf.Tracker({
});
// Fired when a device first appears in the tracked set.
tracker.on('add', function(device) {
  console.log('Device %s was added.', device.serial);
});
// Fired when a device leaves the tracked set.
tracker.on('remove', function(device) {
  console.log('Device %s was removed.', device.serial);
});
// Fired when a tracked device's state changes; `present` is read as a boolean.
tracker.on('change', function(device) {
  console.log('Device %s changed state to %s.', device.serial, device.present ? 'present' : 'absent');
});
// Begin tracking; the handlers above receive subsequent events.
tracker.track();
Using AI Code Generation
// Example: subscribe to device add/remove/change events with devicefarmer-stf-client.
var stf = require('devicefarmer-stf-client');
// NOTE(review): `stfClient` is not defined in this snippet — presumably a
// previously-constructed client instance; incomplete as shown.
var tracker = new stf.Tracker(stfClient);
tracker.on('add', function(device) {
  console.log('Added device ' + device.serial);
});
tracker.on('remove', function(device) {
  console.log('Removed device ' + device.serial);
});
tracker.on('change', function(device) {
  console.log('Changed device ' + device.serial);
});
// Start receiving events (this snippet uses start(); the other example uses track()).
tracker.start();
Learn to execute automation testing from scratch with the LambdaTest Learning Hub. From setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.
You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.
Get 100 minutes of automation testing for FREE!