Best Python code snippet using tox_python
__init__.py
Source:__init__.py
1"""2Contains possible interactions with the Apollo Organisms Module3"""4import json5from apollo.client import Client6from apollo.decorators import raise_error_decorator7class OrganismsClient(Client):8 CLIENT_BASE = '/organism/'9 @raise_error_decorator10 def add_organism(self, common_name, directory, blatdb=None, genus=None,11 species=None, public=False, metadata=None, suppress_output=False):12 """13 Add an organism14 :type common_name: str15 :param common_name: Organism common name16 :type directory: str17 :param directory: Server-side directory18 :type blatdb: str19 :param blatdb: Server-side path to 2bit index of the genome for Blat20 :type genus: str21 :param genus: Genus22 :type species: str23 :param species: Species24 :type public: bool25 :param public: Should the organism be public or not26 :type metadata: str27 :param metadata: JSON formatted arbitrary metadata28 :type suppress_output: bool29 :param suppress_output: Suppress output of all organisms (true / false) (default false)30 :rtype: dict31 :return: a dictionary with information about the new organism32 """33 data = {34 'commonName': common_name,35 'directory': directory,36 'publicMode': public,37 }38 if blatdb is not None:39 data['blatdb'] = blatdb40 if genus is not None:41 data['genus'] = genus42 if species is not None:43 data['species'] = species44 if metadata is not None:45 if isinstance(metadata, dict):46 # Apollo wants a string47 metadata = json.dumps(metadata)48 data['metadata'] = metadata49 if suppress_output is not None and suppress_output is True:50 data['returnAllOrganisms'] = False51 response = self.post('addOrganism', data)52 # Apollo decides here that it would be nice to return information about53 # EVERY organism. 
LMAO.54 if type(response) is not list:55 return response56 if len(response) > 0:57 return [x for x in response if x['commonName'] == common_name][0]58 else:59 return data60 def update_organism(self, organism_id, common_name, directory, blatdb=None, species=None, genus=None, public=False,61 no_reload_sequences=False, suppress_output=False):62 """63 Update an organism64 :type organism_id: str65 :param organism_id: Organism ID Number66 :type common_name: str67 :param common_name: Organism common name68 :type directory: str69 :param directory: Server-side directory70 :type blatdb: str71 :param blatdb: Server-side Blat directory for the organism72 :type genus: str73 :param genus: Genus74 :type species: str75 :param species: Species76 :type public: bool77 :param public: User's email78 :type no_reload_sequences: bool79 :param no_reload_sequences: Set this if you don't want Apollo to reload genome sequences (no change in genome sequence)80 :type suppress_output: bool81 :param suppress_output: Suppress output of all organisms (true / false) (default false)82 :rtype: dict83 :return: a dictionary with information about the updated organism84 """85 data = {86 'id': organism_id,87 'name': common_name,88 'directory': directory,89 'publicMode': public,90 'noReloadSequences': no_reload_sequences,91 }92 if blatdb is not None:93 data['blatdb'] = blatdb94 if genus is not None:95 data['genus'] = genus96 if species is not None:97 data['species'] = species98 if suppress_output is not None and suppress_output is True:99 data['returnAllOrganisms'] = False100 response = self.post('updateOrganismInfo', data)101 if type(response) is not list:102 return response103 if len(response) > 0:104 return [x for x in response if x['commonName'] == common_name][0]105 else:106 return self.show_organism(common_name)107 def get_organisms(self, common_name=None):108 """109 Get all organisms110 :type common_name: str111 :param common_name: Optionally filter on common name112 :rtype: list113 :return: 
Organism information114 """115 if common_name is None:116 orgs = self.post('findAllOrganisms', data={})117 else:118 orgs = self.post('findAllOrganisms', {'organism': common_name})119 return orgs120 def show_organism(self, common_name):121 """122 Get information about a specific organism.123 :type common_name: str124 :param common_name: Organism Common Name125 :rtype: dict126 :return: a dictionary containing the organism's information127 """128 orgs = self.get_organisms(common_name=common_name)129 if isinstance(orgs, list) and len(orgs) > 0:130 orgs = orgs[0]131 return orgs132 def delete_organism(self, organism_id, suppress_output=False):133 """134 Delete an organism135 :type organism_id: str136 :param organism_id: Organism ID Number137 :type suppress_output: bool138 :param suppress_output: Suppress return of all organisms (true / false) (default false)139 :rtype: list140 :return: A list of all remaining organisms141 """142 data = {143 'id': organism_id,144 }145 if suppress_output is not None and suppress_output is not False:146 data['returnAllOrganisms'] = False147 return self.post('deleteOrganism', data)148 def delete_features(self, organism_id):149 """150 Remove features of an organism151 :type organism_id: str152 :param organism_id: Organism ID Number153 :rtype: dict154 :return: an empty dictionary155 """156 return self.post('deleteOrganismFeatures', {'organism': organism_id})157 def get_sequences(self, organism_id):158 """159 Get the sequences for an organism160 :type organism_id: str161 :param organism_id: Organism ID Number162 :rtype: list of dict163 :return: The set of sequences associated with an organism164 """165 return self.post('getSequencesForOrganism', {'organism': organism_id})166 def get_organism_creator(self, organism_id):167 """168 Get the creator of an organism169 :type organism_id: str170 :param organism_id: Organism ID Number171 :rtype: dict172 :return: a dictionary containing user information173 """174 return self.post('getOrganismCreator', 
{'organism': organism_id})175 def update_metadata(self, organism_id, metadata):176 """177 Update the metadata for an existing organism.178 :type organism_id: str179 :param organism_id: Organism ID Number180 :type metadata: str181 :param metadata: Organism metadata. (Recommendation: use a structured format like JSON)182 :rtype: dict183 :return: An empty, useless dictionary184 """...
utils.py
Source:utils.py
import torch
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import random_split, DataLoader
from torchvision import datasets, transforms

# Fixed seed so the train/val split and shuffle order are reproducible.
torch.manual_seed(0)

data_dir = "data/mnist"

# Variant 0: raw pixel values in [0, 1] (no normalization).
transforms_0 = transforms.Compose([transforms.ToTensor()])
train_dat_0_all = datasets.MNIST(
    data_dir, download=True, train=True, transform=transforms_0
)
train_dat_0, val_dat_0 = random_split(train_dat_0_all, [54000, 6000])
test_dat_0 = datasets.MNIST(data_dir, train=False, transform=transforms_0)
train_loader_0 = DataLoader(train_dat_0, batch_size=64, shuffle=True)
val_loader_0 = DataLoader(val_dat_0, batch_size=1000, shuffle=False)
test_loader_0 = DataLoader(test_dat_0, batch_size=1000, shuffle=False)

# Variant 1: inputs normalized with the standard MNIST mean/std.
transforms_1 = transforms.Compose(
    [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
)
train_dat_1_all = datasets.MNIST(data_dir, train=True, transform=transforms_1)
train_dat_1, val_dat_1 = random_split(train_dat_1_all, [54000, 6000])
test_dat_1 = datasets.MNIST(data_dir, train=False, transform=transforms_1)
train_loader_1 = DataLoader(train_dat_1, batch_size=64, shuffle=True)
val_loader_1 = DataLoader(val_dat_1, batch_size=1000, shuffle=False)
test_loader_1 = DataLoader(test_dat_1, batch_size=1000, shuffle=False)

# A single normalized batch, handy for smoke tests / shape checks.
tmp = iter(train_loader_1)
sample_inputs, sample_targets = next(tmp)


def train(
    model,
    dataloader,
    optimizer,
    l1_penalty_coef,
    l2_penalty_coef,
    suppress_output,
):
    """Run one training epoch and return (avg_loss, accuracy).

    The reported loss is the plain NLL averaged over the dataset; the L1/L2
    penalty terms are added to the loss AFTER the reporting accumulation, so
    they affect the gradients but not the reported numbers.

    :param model: network whose forward returns log-probabilities and which
        exposes l1_weight_penalty() / l2_weight_penalty() when the
        corresponding coefficient is non-zero
    :param dataloader: training DataLoader
    :param optimizer: torch optimizer over model parameters
    :param l1_penalty_coef: weight of the L1 penalty (0.0 disables it)
    :param l2_penalty_coef: weight of the L2 penalty (0.0 disables it)
    :param suppress_output: if False, print the epoch summary
    :return: (train_loss, train_accuracy) as Python floats
    """
    total_loss = 0.0
    correct = 0
    for inputs, targets in dataloader:
        optimizer.zero_grad()
        outputs = model(inputs)
        loss = F.nll_loss(outputs, targets)
        # Accumulate BEFORE adding penalties so the reported loss is the
        # pure data term; weight by batch size for a dataset-wide average.
        total_loss += len(inputs) * loss.item()
        if l1_penalty_coef != 0.0:
            loss += l1_penalty_coef * model.l1_weight_penalty()
        if l2_penalty_coef != 0.0:
            loss += l2_penalty_coef * model.l2_weight_penalty()
        loss.backward()
        optimizer.step()
        with torch.no_grad():
            pred = outputs.argmax(dim=1, keepdim=True)
            correct += pred.eq(targets.view_as(pred)).sum().item()
    train_loss = total_loss / len(dataloader.dataset)
    train_accuracy = correct / len(dataloader.dataset)
    if not suppress_output:
        print(
            "Train set:\tAverage loss: {:.4f}, Accuracy: {:.4f}".format(
                train_loss, train_accuracy
            )
        )
    return train_loss, train_accuracy


def evaluate(model, dataloader, eval_type, suppress_output=True):
    """Evaluate the model on a dataloader and return (avg_loss, accuracy).

    :param model: network whose forward returns log-probabilities
    :param dataloader: evaluation DataLoader
    :param eval_type: label used in the printed summary (e.g. "Validation")
    :param suppress_output: if False, print the summary line
    :return: (loss, accuracy) as Python floats
    """
    total_loss = 0.0
    correct = 0
    with torch.no_grad():
        for inputs, targets in dataloader:
            outputs = model(inputs)
            # BUG FIX: accumulate .item() so the returned loss is a float,
            # matching train(); previously this returned a 0-dim tensor.
            total_loss += F.nll_loss(outputs, targets, reduction="sum").item()
            pred = outputs.argmax(dim=1, keepdim=True)
            correct += pred.eq(targets.view_as(pred)).sum().item()
    loss = total_loss / len(dataloader.dataset)
    accuracy = correct / len(dataloader.dataset)
    if not suppress_output:
        print(
            "{} set:\tAverage loss: {:.4f}, Accuracy: {:.4f}\n".format(
                eval_type, loss, accuracy
            )
        )
    return loss, accuracy


def run_experiment(
    model,
    optimizer,
    train_loader,
    val_loader,
    test_loader,
    n_epochs,
    l1_penalty_coef,
    l2_penalty_coef,
    suppress_output=True,
):
    """Train for n_epochs with per-epoch validation, then test once.

    :param model: network to train
    :param optimizer: torch optimizer over model parameters
    :param train_loader: training DataLoader
    :param val_loader: validation DataLoader (evaluated every epoch)
    :param test_loader: test DataLoader (evaluated once at the end)
    :param n_epochs: number of training epochs
    :param l1_penalty_coef: weight of the L1 penalty (0.0 disables it)
    :param l2_penalty_coef: weight of the L2 penalty (0.0 disables it)
    :param suppress_output: if False, print per-epoch progress
    :return: dict with per-epoch train/val curves and final test metrics
    """
    train_losses = []
    train_accuracies = []
    val_losses = []
    val_accuracies = []
    for epoch in range(n_epochs):
        if not suppress_output:
            print("Epoch {}: training...".format(epoch))
        train_loss, train_accuracy = train(
            model=model,
            dataloader=train_loader,
            optimizer=optimizer,
            l1_penalty_coef=l1_penalty_coef,
            l2_penalty_coef=l2_penalty_coef,
            suppress_output=suppress_output,
        )
        val_loss, val_accuracy = evaluate(
            model=model,
            dataloader=val_loader,
            eval_type="Validation",
            suppress_output=suppress_output,
        )
        train_losses.append(train_loss)
        train_accuracies.append(train_accuracy)
        val_losses.append(val_loss)
        val_accuracies.append(val_accuracy)
    final_test_loss, final_test_accuracy = evaluate(
        model=model,
        dataloader=test_loader,
        eval_type="Test",
        suppress_output=suppress_output,
    )
    return {
        "train_losses": train_losses,
        "train_accuracies": train_accuracies,
        "val_losses": val_losses,
        "val_accuracies": val_accuracies,
        "final_test_loss": final_test_loss,
        "final_test_accuracy": final_test_accuracy,
    }
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 minutes of automation test minutes FREE!!