Best Python code snippet using autotest_python
compute_rate_time_series.py
Source:compute_rate_time_series.py
"""
Compute time series of population-averaged spike rates for a given
area from raw spike files of a given simulation.

Implements three different methods:
- binned spike histograms on all neurons ('full')
- binned spike histograms on a subsample of 140 neurons ('subsample')
- spike histograms convolved with a Gaussian kernel of optimal width
  after Shimazaki et al. (2010)

Command line:
    python compute_rate_time_series.py <data_path> <label> <area> <method>
"""
import json
import os
import sys

import neo
import numpy as np
import quantities as pq
from elephant.statistics import instantaneous_rate
from multiarea_model import MultiAreaModel
from multiarea_model.analysis_helpers import pop_rate_time_series

# Explicit validation instead of `assert`: asserts are stripped under
# `python -O`, which would silently accept a malformed command line.
if len(sys.argv) != 5:
    raise SystemExit(
        'Usage: compute_rate_time_series.py <data_path> <label> <area> <method>')
data_path = sys.argv[1]
label = sys.argv[2]
area = sys.argv[3]
method = sys.argv[4]
# subsample : subsample spike data to 140 neurons to match the Chu 2014 data
# full : use spikes of all neurons and compute spike histogram with bin size 1 ms
# auto_kernel : use spikes of all neurons and convolve with Gaussian
#               kernel of optimal width using the method of Shimazaki et al. (2012)
# (see Method parts of the paper)
if method not in ('subsample', 'full', 'auto_kernel'):
    raise ValueError('Unknown method: {!r}'.format(method))

load_path = os.path.join(data_path,
                         label,
                         'recordings')
save_path = os.path.join(data_path,
                         label,
                         'Analysis',
                         'rate_time_series_{}'.format(method))
# Idiomatic replacement for try/mkdir/except FileExistsError.
os.makedirs(save_path, exist_ok=True)

with open(os.path.join(data_path, label,
                       'custom_params_{}'.format(label)), 'r') as f:
    sim_params = json.load(f)
T = sim_params['T']  # simulation end time in ms

# Create MultiAreaModel instance to have access to data structures
# (population sizes M.N, population structure M.structure, area list).
M = MultiAreaModel({})

time_series_list = []
N_list = []
for pop in M.structure[area]:
    # Assumes that the default label for spike files was used.
    fp = '-'.join((label,
                   'spikes',
                   area,
                   pop))
    fn = '{}/{}.npy'.format(load_path, fp)
    spike_data = np.load(fn)
    # Discard the initial 500 ms transient and anything past T.
    spike_data = spike_data[np.logical_and(spike_data[:, 1] > 500.,
                                           spike_data[:, 1] <= T)]
    if method == 'subsample':
        all_gid = np.unique(spike_data[:, 0])
        # Population share of the 140-neuron subsample, proportional to
        # the population's size within the area.
        N = int(np.round(140 * M.N[area][pop] / M.N[area]['total']))
        i = 0
        s = 0
        gid_list = []
        # Collect the first N neurons whose firing rate exceeds 0.56 Hz
        # (detection threshold matching the Chu 2014 experimental data).
        while s < N:
            if i >= len(all_gid):
                # Guard against running past the recorded neurons instead
                # of failing with a bare IndexError.
                raise RuntimeError(
                    'Not enough active neurons in {} {} for subsampling'.format(
                        area, pop))
            rate = (spike_data[:, 1][spike_data[:, 0] == all_gid[i]].size
                    / (1e-3 * (T - 500.)))
            if rate > 0.56:
                gid_list.append(all_gid[i])
                s += 1
            i += 1
        spike_data = spike_data[np.isin(spike_data[:, 0], gid_list)]
        kernel = 'binned_subsample'
    if method == 'full':
        N = M.N[area][pop]  # Assumes that all neurons were recorded
        kernel = 'binned'

    if method in ['subsample', 'full']:
        time_series = pop_rate_time_series(spike_data, N, 500., T,
                                           resolution=1.)
    if method == 'auto_kernel':
        # To reduce the computational load, the time series is only
        # computed until 10500. ms.
        T = 10500.
        N = M.N[area][pop]  # Assumes that all neurons were recorded
        st = neo.SpikeTrain(spike_data[:, 1] * pq.ms, t_stop=T * pq.ms)
        time_series = instantaneous_rate(st, 1. * pq.ms,
                                         t_start=500. * pq.ms,
                                         t_stop=T * pq.ms)
        # Convert to population-averaged rate (Hz per neuron).
        time_series = np.array(time_series)[:, 0] / N
        kernel = 'auto'

    time_series_list.append(time_series)
    N_list.append(N)

    fp = '_'.join(('rate_time_series',
                   method,
                   area,
                   pop))
    np.save('{}/{}.npy'.format(save_path, fp), time_series)

# Area-level rate: population rates weighted by population size.
time_series_list = np.array(time_series_list)
area_time_series = np.average(time_series_list, axis=0, weights=N_list)
fp = '_'.join(('rate_time_series',
               method,
               area))
np.save('{}/{}.npy'.format(save_path, fp), area_time_series)

par = {'areas': M.area_list,
       'pops': 'complete',
       'kernel': kernel,
       'resolution': 1.,
       't_min': 500.,
       't_max': T}
fp = '_'.join(('rate_time_series',
               method,
               'Parameters.json'))
with open('{}/{}'.format(save_path, fp), 'w') as f:
    # NOTE(review): the scraped source was truncated here; writing the
    # parameter dict to the JSON file is the only sensible completion —
    # confirm against the upstream repository.
    json.dump(par, f)
source.py
Source:source.py
...37 estimation_dif_rule_histogram(sampled_data, gm1d)38elif estimation_method == 'kernel':39 estimation_kernel(sampled_data, gm1d, kernel_h)40elif estimation_method == 'auto_kernel':41 estimation_auto_kernel(sampled_data, gm1d)42elif estimation_method == 'knn':...
Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 minutes of automation testing FREE!