Best Python code snippet using localstack_python
test_committers.py
Source: test_committers.py
...
        type=type or 'M',
    )


class TokenizePathTestCase(TestCase):
    def test_forward_slash(self):
        assert list(tokenize_path('foo/bar')) == ['bar', 'foo']

    def test_back_slash(self):
        assert list(tokenize_path('foo\\bar')) == ['bar', 'foo']

    def test_dot_does_not_separate(self):
        assert list(tokenize_path('foo.bar')) == ['foo.bar']

    def test_additional_slash_in_front(self):
        assert list(tokenize_path('/foo/bar')) == ['bar', 'foo']
        assert list(tokenize_path('\\foo\\bar')) == ['bar', 'foo']

    def test_relative_paths(self):
        assert list(tokenize_path('./')) == ['.']
        assert list(tokenize_path('./../')) == ['..', '.']
        assert list(tokenize_path('./foo/bar')) == ['bar', 'foo', '.']
        assert list(tokenize_path('.\\foo\\bar')) == ['bar', 'foo', '.']

    def test_path_with_spaces(self):
        assert list(tokenize_path('\\foo bar\\bar')) == ['bar', 'foo bar']

    def test_no_path(self):
        assert list(tokenize_path('/')) == []


class ScorePathMatchLengthTest(TestCase):
    def test_equal_paths(self):
        assert score_path_match_length('foo/bar/baz', 'foo/bar/baz') == 3

    def test_partial_match_paths(self):
        assert score_path_match_length('foo/bar/baz', 'bar/baz') == 2
        assert score_path_match_length('foo/bar/baz', 'baz') == 1

    def test_why_is_this_zero(self):
        assert score_path_match_length('foo/bar/baz', 'foo') == 0

    def test_path_with_empty_path_segment(self):
        assert score_path_match_length('./foo/bar/baz', 'foo/bar/baz') == 3


class GetFramePathsTestCase(TestCase):
    def setUp(self):
        self.event = Mock()
        self.event.data = {}
...
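The implementations of tokenize_path and score_path_match_length are not shown in this excerpt. A minimal sketch that satisfies the assertions above (an illustration only, not the code actually under test) could look like this:

import re


def tokenize_path(path):
    # Split on forward and back slashes, drop empty segments,
    # and return the tokens from deepest to shallowest.
    tokens = [token for token in re.split(r'[\\/]', path) if token]
    return reversed(tokens)


def score_path_match_length(path_a, path_b):
    # Count how many trailing segments the two paths share.
    score = 0
    for token_a, token_b in zip(tokenize_path(path_a), tokenize_path(path_b)):
        if token_a != token_b:
            break
        score += 1
    return score

Reversing the segments makes the scoring function compare file names first, which is why 'foo/bar/baz' scored against 'foo' yields zero in test_why_is_this_zero: the deepest segments ('baz' and 'foo') already disagree.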
data.py
Source: data.py
...
class Corpus(object):
    def __init__(self, path="", dicty=None):
        if len(path) > 0:
            # Build a fresh dictionary and tokenize the three corpus splits.
            self.dictionary = Dictionary()
            self.train = self.tokenize_path(os.path.join(path, 'train.txt'))
            self.valid = self.tokenize_path(os.path.join(path, 'valid.txt'))
            self.test = self.tokenize_path(os.path.join(path, 'test.txt'))
        else:
            # No corpus path given: reuse the dictionary passed in by the caller.
            self.dictionary = dicty

    def tokenize_path(self, path):
        """Tokenizes a text file."""
        assert os.path.exists(path)
        # Add words to the dictionary
        with open(path, 'r') as f:
            tokens = 0
            for line in f:
                words = line.split() + ['<eos>']
                tokens += len(words)
                for word in words:
                    self.dictionary.add_word(word)
        # Tokenize file content
        with open(path, 'r') as f:
            ids = torch.LongTensor(tokens)
            token = 0
...
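The data.py excerpt is truncated just after the ids tensor is allocated. As a rough, self-contained sketch of what the two-pass pattern accomplishes (assuming a word2idx mapping of the kind Dictionary.add_word typically maintains; this is not the file's actual continuation):

import os

import torch


def tokenize_file(path, word2idx):
    # Sketch: turn a text file into a flat LongTensor of token ids,
    # appending an <eos> marker at the end of every line.
    assert os.path.exists(path)
    ids = []
    with open(path, 'r') as f:
        for line in f:
            for word in line.split() + ['<eos>']:
                ids.append(word2idx[word])
    return torch.LongTensor(ids)

The original method instead pre-allocates the tensor using the token count gathered in the first pass and fills it position by position via the token counter, which avoids building an intermediate Python list.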