Best Python code snippet using grail_python
keystroke_dynamics_model.py
Source:keystroke_dynamics_model.py
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit
from xgboost.sklearn import XGBClassifier
import xgboost as xgb

train = pd.read_csv('model/train.csv')
test = pd.read_csv('model/test.csv')
test_Combined = None
train_Combined = None
nr_bins = 10
HDMax, RPDMax, PPDMax = None, None, None
HDBins, RPDBins, PPDBins = None, None, None

# Feature engineering on the raw key timings (13 keys per sample):
#   HD-i  (hold duration)       = release-i - press-i
#   PPD-i (press-press delay)   = press-i   - press-(i-1)
#   RPD-i (release-press delay) = release-i - press-(i-1)
train_1 = train
for i in range(1, 13):
    train_1['PPD-' + str(i)] = train_1['press-' + str(i)] - train_1['press-' + str(i - 1)]
    train_1['RPD-' + str(i)] = train_1['release-' + str(i)] - train_1['press-' + str(i - 1)]
for i in range(13):
    train_1['HD-' + str(i)] = train_1['release-' + str(i)] - train_1['press-' + str(i)]

test_1 = test
for i in range(1, 13):
    test_1['PPD-' + str(i)] = test_1['press-' + str(i)] - test_1['press-' + str(i - 1)]
    test_1['RPD-' + str(i)] = test_1['release-' + str(i)] - test_1['press-' + str(i - 1)]
for i in range(13):
    test_1['HD-' + str(i)] = test_1['release-' + str(i)] - test_1['press-' + str(i)]


def train():
    # Note: this function rebinds the name `train`; the training DataFrame
    # remains reachable through `train_1`.
    global train_Combined, test_Combined, HDBins, RPDBins, PPDBins

    # Training data: reshape each timing feature from wide to long format.
    drop_cols_HD_analysis = (['PPD-' + str(i) for i in range(1, 13)]
                             + ['RPD-' + str(i) for i in range(1, 13)]
                             + ['release-' + str(i) for i in range(13)])
    train_HD_analysis = train_1.drop(columns=drop_cols_HD_analysis)
    train_HD_analysis['id'] = train_HD_analysis.index
    train_HD_analysis = pd.wide_to_long(train_HD_analysis, ['press-', 'HD-'],
                                        i='id', j='key_no').sort_values(by=['user', 'id', 'key_no'])

    drop_cols_PPD_analysis = (['HD-' + str(i) for i in range(13)]
                              + ['RPD-' + str(i) for i in range(1, 13)]
                              + ['release-' + str(i) for i in range(13)] + ['press-0'])
    train_PPD_analysis = train_1.drop(columns=drop_cols_PPD_analysis)
    train_PPD_analysis['id'] = train_PPD_analysis.index
    train_PPD_analysis = pd.wide_to_long(train_PPD_analysis, ['press-', 'PPD-'],
                                         i='id', j='key_no').sort_values(by=['user', 'id', 'key_no'])

    drop_cols_RPD_analysis = (['HD-' + str(i) for i in range(13)]
                              + ['PPD-' + str(i) for i in range(1, 13)]
                              + ['release-' + str(i) for i in range(13)] + ['press-0'])
    train_RPD_analysis = train_1.drop(columns=drop_cols_RPD_analysis)
    train_RPD_analysis['id'] = train_RPD_analysis.index
    train_RPD_analysis = pd.wide_to_long(train_RPD_analysis, ['press-', 'RPD-'],
                                         i='id', j='key_no').sort_values(by=['user', 'id', 'key_no'])

    # Test data: same reshaping.
    test_HD_analysis = test_1.drop(columns=drop_cols_HD_analysis)
    test_HD_analysis['id'] = test_HD_analysis.index
    test_HD_analysis = pd.wide_to_long(test_HD_analysis, ['press-', 'HD-'],
                                       i='id', j='key_no').sort_values(by=['id', 'key_no'])
    test_PPD_analysis = test_1.drop(columns=drop_cols_PPD_analysis)
    test_PPD_analysis['id'] = test_PPD_analysis.index
    test_PPD_analysis = pd.wide_to_long(test_PPD_analysis, ['press-', 'PPD-'],
                                        i='id', j='key_no').sort_values(by=['id', 'key_no'])
    test_RPD_analysis = test_1.drop(columns=drop_cols_RPD_analysis)
    test_RPD_analysis['id'] = test_RPD_analysis.index
    test_RPD_analysis = pd.wide_to_long(test_RPD_analysis, ['press-', 'RPD-'],
                                        i='id', j='key_no').sort_values(by=['id', 'key_no'])

    # Join these individual tables together.
    test_Combined = test_HD_analysis.join(
        test_RPD_analysis.drop(columns=['press-']), rsuffix='RPD_').join(
        test_PPD_analysis.drop(columns=['press-']), rsuffix='PPD_')
    train_Combined = train_HD_analysis.join(
        train_RPD_analysis.drop(columns=['user', 'press-']), rsuffix='RPD_').join(
        train_PPD_analysis.drop(columns=['user', 'press-']), rsuffix='PPD_')
    # print('Max values in train are: HDMax:', HDMax, 'RPDMax:', RPDMax, 'PPDMax:', PPDMax)

    # Discretise each timing feature into nr_bins quantile bins and keep the
    # bin edges so the test data can be encoded with the same boundaries.
    labels = [i for i in range(nr_bins)]
    train_Combined['HDEnc'], HDBins = pd.qcut(train_Combined['HD-'],
                                              retbins=True, labels=labels, q=nr_bins)
    train_Combined['PPDEnc'], PPDBins = pd.qcut(train_Combined['PPD-'],
                                                retbins=True, labels=labels, q=nr_bins)
    train_Combined['RPDEnc'], RPDBins = pd.qcut(train_Combined['RPD-'],
                                                retbins=True, labels=labels, q=nr_bins)
    train_Combined['HDEnc'] = train_Combined['HDEnc'].astype(str).replace('nan', -1).astype(int)
    train_Combined['PPDEnc'] = train_Combined['PPDEnc'].astype(str).replace('nan', -1).astype(float)
    train_Combined['RPDEnc'] = train_Combined['RPDEnc'].astype(str).replace('nan', -1).astype(float)


def predict(pressed_t, released_t):
    HDMax = test_Combined['HD-'].max()
    RPDMax = test_Combined['RPD-'].max()
    PPDMax = test_Combined['PPD-'].max()
    # print('Max values in test are: HDMax:', HDMax, 'RPDMax:', RPDMax, 'PPDMax:', PPDMax)

    # Encode the test timings with the bin edges learned on the training data.
    labels = [i for i in range(nr_bins)]
    test_Combined['HDEnc'] = pd.cut(test_Combined['HD-'], labels=labels, bins=HDBins)
    test_Combined['PPDEnc'] = pd.cut(test_Combined['PPD-'], labels=labels, bins=PPDBins)
    test_Combined['RPDEnc'] = pd.cut(test_Combined['RPD-'], labels=labels, bins=RPDBins)
    test_Combined['HDEnc'] = test_Combined['HDEnc'].astype(str).replace('nan', -1).astype(float)
    test_Combined['PPDEnc'] = test_Combined['PPDEnc'].astype(str).replace('nan', -1).astype(float)
    test_Combined['RPDEnc'] = test_Combined['RPDEnc'].astype(str).replace('nan', -1).astype(float)

    # Per-user, per-key averages of the encoded features (train) ...
    train_Combined_HDAvg = train_Combined.reset_index().groupby(['user', 'key_no'])['HDEnc'].mean()
    train_Combined_PPDAvg = train_Combined.reset_index().groupby(['user', 'key_no'])['PPDEnc'].mean()
    train_Combined_RPDAvg = train_Combined.reset_index().groupby(['user', 'key_no'])['RPDEnc'].mean()
    temp = pd.DataFrame({'HD': train_Combined_HDAvg, 'PPD': train_Combined_PPDAvg,
                         'RPD': train_Combined_RPDAvg})
    train_HDProperties = temp.reset_index().groupby('user')['HD'].apply(np.array)
    train_PPDProperties = temp.reset_index().groupby('user')['PPD'].apply(np.array)
    train_RPDProperties = temp.reset_index().groupby('user')['RPD'].apply(np.array)
    train_UserProps = pd.DataFrame({'HD': train_HDProperties, 'PPD': train_PPDProperties,
                                    'RPD': train_RPDProperties})

    pressed_t, released_t = released_t, pressed_t

    # ... and per-sample, per-key averages for the test data.
    train_Combined_HDAvg = test_Combined.reset_index().groupby(['id', 'key_no'])['HDEnc'].mean()
    train_Combined_PPDAvg = test_Combined.reset_index().groupby(['id', 'key_no'])['PPDEnc'].mean()
    train_Combined_RPDAvg = test_Combined.reset_index().groupby(['id', 'key_no'])['RPDEnc'].mean()
    temp = pd.DataFrame({'HD': train_Combined_HDAvg, 'PPD': train_Combined_PPDAvg,
                         'RPD': train_Combined_RPDAvg})
    train_HDProperties = temp.reset_index().groupby('id')['HD'].apply(np.array)
    train_PPDProperties = temp.reset_index().groupby('id')['PPD'].apply(np.array)
    train_RPDProperties = temp.reset_index().groupby('id')['RPD'].apply(np.array)
    test_UserProps = pd.DataFrame({'HD': train_HDProperties, 'PPD': train_PPDProperties,
                                   'RPD': train_RPDProperties})
    test_UserProps = pd.DataFrame(test_UserProps.HD.tolist(),
                                  index=test_UserProps.index).add_prefix('HD_').join(
        pd.DataFrame(test_UserProps.PPD.tolist(),
                     index=test_UserProps.index).add_prefix('PPD_')).join(
        pd.DataFrame(test_UserProps.RPD.tolist(),
                     index=test_UserProps.index).add_prefix('RPD_'))

    # One row of encoded features per (user, sample) for the training set ...
    train_HDTemp = train_Combined.reset_index().groupby(['user', 'id'])['HDEnc'].apply(np.array)
    train_PPDTemp = train_Combined.reset_index().groupby(['user', 'id'])['PPDEnc'].apply(np.array)
    train_RPDTemp = train_Combined.reset_index().groupby(['user', 'id'])['RPDEnc'].apply(np.array)
    train_User_AllSampleProps = pd.DataFrame({'HD': train_HDTemp, 'PPD': train_PPDTemp,
                                              'RPD': train_RPDTemp})
    train_User_AllSampleProps = pd.DataFrame(train_User_AllSampleProps.HD.tolist(),
                                             index=train_User_AllSampleProps.index).add_prefix('HD_').join(
        pd.DataFrame(train_User_AllSampleProps.PPD.tolist(),
                     index=train_User_AllSampleProps.index).add_prefix('PPD_')).join(
        pd.DataFrame(train_User_AllSampleProps.RPD.tolist(),
                     index=train_User_AllSampleProps.index).add_prefix('RPD_')).reset_index().set_index('user').drop(columns=['id'])

    # ... and one row per sample id for the test set.
    train_HDTemp = test_Combined.reset_index().groupby(['id'])['HDEnc'].apply(np.array)
    train_PPDTemp = test_Combined.reset_index().groupby(['id'])['PPDEnc'].apply(np.array)
    train_RPDTemp = test_Combined.reset_index().groupby(['id'])['RPDEnc'].apply(np.array)
    test_User_AllSampleProps = pd.DataFrame({'HD': train_HDTemp, 'PPD': train_PPDTemp,
                                             'RPD': train_RPDTemp})
    test_User_AllSampleProps = pd.DataFrame(test_User_AllSampleProps.HD.tolist(),
                                            index=test_User_AllSampleProps.index).add_prefix('HD_').join(
        pd.DataFrame(test_User_AllSampleProps.PPD.tolist(),
                     index=test_User_AllSampleProps.index).add_prefix('PPD_')).join(
        pd.DataFrame(test_User_AllSampleProps.RPD.tolist(),
                     index=test_User_AllSampleProps.index).add_prefix('RPD_'))

    trainX_allSamples = train_User_AllSampleProps.reset_index().drop(columns=['user'])
    trainY_allSamples = train_User_AllSampleProps.index

    # Load the pre-trained XGBoost classifier and score the test samples.
    model = xgb.XGBClassifier()
    model.load_model('keystroke_model.bst')
    testX_allSamples = test_User_AllSampleProps.reset_index().drop(columns=['id'])
    pd.DataFrame({'idx': testX_allSamples.index},
                 index=testX_allSamples.index).to_csv('submission_x.csv', index=False)
...
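Taken together, train() learns the quantile bin edges and combined feature tables from the training CSV, and predict() re-encodes the test set with those edges and scores it against the saved XGBoost model. A minimal driver could look like the sketch below; the module name keystroke_dynamics_model and the example timing lists are assumptions, and the model/train.csv, model/test.csv and keystroke_model.bst files referenced by the snippet must already exist.

# Hypothetical driver for the snippet above. The module name and the timing
# values are illustrative assumptions, not part of the original source.
import keystroke_dynamics_model as kdm

# Build the combined feature tables and learn the quantile bin edges.
kdm.train()

# Press/release timestamps (ms) for one 13-key sample; values are made up.
pressed = [0, 110, 240, 350, 470, 590, 700, 820, 930, 1050, 1160, 1280, 1400]
released = [80, 190, 320, 430, 550, 660, 780, 890, 1010, 1120, 1240, 1360, 1480]

# Encode the test set with the learned bins, load keystroke_model.bst and
# write submission_x.csv.
kdm.predict(pressed, released)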
test.py
Source:test.py
...
    '''
    single_word = SingleShortenedWord(text)
    single_word.generate()
    test_common(text, target_word, single_word)

def test_combined(text, target_word):
    '''Test for shortened (combined) words.
    '''
    combined_word = CombinedShortenedWord(text)
    combined_word.generate()
    test_common(text, target_word, combined_word)

def test_yuragi(text, target_word):
    '''Test for "yuragi" (variant spelling) words.
    '''
    yuragi_word = Yuragi(text)
    yuragi_word.generate()
    test_common(text, target_word, yuragi_word)

def main():
    # # -------------------------------
    # # Single-word tests
    # # -------------------------------
    # # Single word (split)
    # test_single('CRISIS å¬å®æ©åææ»éç¹æç', 'CRISIS')
    # test_single('SRãµã¤ã¿ããã©ãããã¼~ãã¤ã¯ã®ç´°é~', 'ãµã¤ã¿ããã©ãããã¼')
    # test_single('ä¸å±æ£åºã®ãã«ãªãå³æ¸é¤¨', 'ãã«ãªãå³æ¸é¤¨')
    # test_single('ãã¡ã¤ãã«ãã¡ã³ã¿ã¸ã¼XIVãåã®ãç¶ãã', 'åã®ãç¶ãã')
    # test_single('é¢ã¸ã£ã ãå®å¨çSHOW', 'é¢ã¸ã£ã ')
    # # Single word (split and converted)
    # test_single('CRISIS å¬å®æ©åææ»éç¹æç', 'ã¯ã©ã¤ã·ã¹')
    # # Single word (catch copy removed, rest kept as-is)
    # test_single('1å人ã®å¤§è³ªå!?ç¬ã£ã¦ã³ã©ãã¦ï¼', 'ç¬ã£ã¦ã³ã©ãã¦')
    # # Single word (subject kept as-is)
    # test_single('æ«»åããã®è¶³ä¸ã«ã¯æ»ä½ãåã¾ã£ã¦ãã', 'æ«»åãã')
    # # Single word (subject kept as-is)
    # test_single('è¦è¦åºææ»ä¸èª²9ä¿ãseason12', '9ä¿')
    # # Single word (katakana word kept as-is)
    # test_single('幸ãï¼ãã³ãã¼ã¬ã¼ã«', 'ãã³ãã¼ã¬ã¼ã«')
    # # Single word (noise removed, katakana word kept as-is)
    # test_single('ã¦ã¼ãª!!! on ICE', 'ã¦ã¼ãª')
    # # Single word (series name removed)
    # test_single('é²æã®å·¨äºº Season 2ã', 'é²æã®å·¨äºº')
    # # -------------------------------
    # # Combined-word tests
    # # -------------------------------
    # # Combined word
    # # test_combined('ãããµã¼ã¸æ¢åµãã¸ã§ã¼ã', 'æ¢åµã¸ã§ã¼')  # fails: three katakana words are not supported
    # test_combined('ãã¯ãéå½ã®äººã§ãã', 'ãã¯é')
    # test_combined('çå¤ä¸ã®ããªã³ã¹', 'çå¤ããª')
    # test_combined('ç·æ¥å調室 第2ã·ãªã¼ã¹ãã', 'ãã³ããª')
    # test_combined('ä¸å±æ£åºã®éææ¥ã®ã¹ãã¤ã«ãã¡ã¸ã', 'éã¹ã')
    # test_combined('ãã®ç´ æ´ãããä¸çã«ç¥ç¦ãï¼2ã', 'ãã®ãã°')
    # test_combined('ããªãã®ãã¨ã¯ããã»ã©ã', 'ããªãã')
    # test_combined('ãã¯ãéå½ã®äººã§ããã', 'ãã¯é')
    # test_combined('人ã¯è¦ãç®ã100ãã¼ã»ã³ãã', 'ã²ã¨ãã¼')
    # test_combined('æããã¿ã§ãçãã¦ã¾ãã', 'æãã¿')
    # test_combined('ããªãã®ãã¨ã¯ããã»ã©ã', 'ããªãã')
    # test_combined('3人ã®ããã', '3ãã')
    # test_combined('è¦è¦åºææ»ä¸èª²9ä¿ãseason12ã', '9ä¿')
    # # Combined word (decomposed, converted, recombined)
    # test_combined('é¼ã®é¬éè¡å¸«', 'ãã¬ã¬ã³')
    # -------------------------------
    # Yuragi candidate word tests
    # -------------------------------
    # Single word (split)
    test_yuragi('ダンジョンに出会いを求めるのは間違っているだろうか', 'ダンまち')
    test_yuragi('Re：ゼロから始める異世界生活', 'リゼロ')
    test_yuragi('転生したらスライムだった件', '転スラ')
...
test_meta_data.py
Source:test_meta_data.py
import anvil.meta_data as md
from base_test import TestBase

class TestBaseMetaData(TestBase):
    test_meta_data = {'foo': 'moo'}
    test_other_meta_data = {'bar': 'larp'}
    test_combined = {}
    test_combined.update(test_meta_data)
    test_combined.update(test_other_meta_data)
    test_total_meta_data_overwrite = {'foo': 'boo', 'bar': 'farp'}
    test_other_meta_data_overwrite = {'bar': 'marp'}
    test_overwritten = test_combined.copy()
    test_overwritten.update(test_other_meta_data_overwrite)

    def build_dependencies(cls):
        pass

class TestMetaDataMergeDicts(TestBaseMetaData):
    def test_meta_data_double(self):
        merged_dict = md.MetaData().merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(merged_dict, self.test_combined)

    def test_meta_data_single(self):
        merged_dict = md.MetaData().merge(self.test_meta_data)
        self.assertEquals(merged_dict, self.test_meta_data)

    def test_meta_data_empty(self):
        test_meta_data = {}
        actual = {}
        actual.update(test_meta_data)
        merged_dict = md.MetaData().merge(test_meta_data)
        self.assertEquals(merged_dict, actual)

    def test_meta_data_none(self):
        test_meta_data = None
        merged_dict = md.MetaData().merge(test_meta_data)
        self.assertEquals(merged_dict, {})

class TestKeys(TestBaseMetaData):
    def test_default_merge(self):
        meta_data_object = md.MetaData()
        meta_data_object.merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object.keys(), list(self.test_combined))

    def test_initialize_with_dicts(self):
        meta_data_object = md.MetaData(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object.keys(), list(self.test_combined))

    def test_initialize_with_dict_and_splat(self):
        meta_data_object = md.MetaData(self.test_meta_data, **self.test_other_meta_data)
        self.assertEquals(meta_data_object.keys(), list(self.test_combined))

class TestSplatting(TestBaseMetaData):
    @staticmethod
    def single_splat_returner(*args):
        return args

    @staticmethod
    def double_splat_returner(**kwargs):
        return kwargs

    def test_single_from_merge(self):
        meta_data_object = md.MetaData()
        meta_data_object.merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(tuple(meta_data_object.keys()), self.single_splat_returner(*meta_data_object))

    def test_double_from_init(self):
        meta_data_object = md.MetaData(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.double_splat_returner(**meta_data_object.to_dict()))

class TestMerge(TestBaseMetaData):
    def test_default_merge(self):
        meta_data_object = md.MetaData()
        meta_data_object.merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.test_combined)

    def test_initialize_with_dicts(self):
        meta_data_object = md.MetaData(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.test_combined)

    def test_initialize_with_dict_and_splat(self):
        meta_data_object = md.MetaData(self.test_meta_data, **self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.test_combined)

class TestProtection(TestBaseMetaData):
    def test_overwrite_merge(self):
        meta_data_object = md.MetaData(self.test_combined)
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_overwritten)

    def test_protected_merge_force(self):
        meta_data_object = md.MetaData(self.test_combined, protected='bar')
        meta_data_object.merge(self.test_other_meta_data_overwrite, force=True)
        self.assertEquals(meta_data_object, self.test_overwritten)

    def test_protected_multi_merge_force(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['foo', 'bar'])
        meta_data_object.merge(self.test_total_meta_data_overwrite, force=True)
        self.assertEquals(meta_data_object, self.test_total_meta_data_overwrite)

    def test_protected_single_merge_force(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['foo'])
        meta_data_object.merge(self.test_total_meta_data_overwrite, force=True)
        self.assertEquals(meta_data_object, self.test_total_meta_data_overwrite)

    def test_protected_merge(self):
        meta_data_object = md.MetaData(self.test_combined, protected='bar')
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)

    def test_protected_merge_as_list(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['bar'])
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)

    def test_add_protection_via_method(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['bar'])
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)

    def test_add_protection_manually(self):
        meta_data_object = md.MetaData(self.test_combined)
        meta_data_object.protected |= {'bar'}
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)

    def test_add_protection_manually_overwrite(self):
        meta_data_object = md.MetaData(self.test_combined)
        meta_data_object.protected = ['bar']
        meta_data_object.merge(self.test_other_meta_data_overwrite)
...
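These tests pin down the merge and key-protection behaviour of anvil.meta_data.MetaData without showing the class itself. As a rough mental model only, not the actual anvil implementation, a dict-backed class along the following lines would satisfy the assertions: the constructor accepts any number of dicts plus keyword arguments and an optional protected key or list of keys, merge() ignores None inputs and returns the merged mapping, and protected keys are only overwritten when force=True.

# A minimal, hypothetical stand-in written only to illustrate the behaviour
# the tests above assert; the real anvil.meta_data.MetaData will differ.
class MetaData(dict):
    def __init__(self, *dicts, protected=None, **kwargs):
        super().__init__()
        # `protected` may be a single key, an iterable of keys, or None.
        if protected is None:
            self.protected = set()
        elif isinstance(protected, str):
            self.protected = {protected}
        else:
            self.protected = set(protected)
        self.merge(*dicts, force=True, **kwargs)

    def merge(self, *dicts, force=False, **kwargs):
        # Merge any number of dicts (None entries are ignored) plus kwargs;
        # protected keys already present are skipped unless force=True.
        for source in list(dicts) + [kwargs]:
            if not source:
                continue
            for key, value in source.items():
                if key in self and key in self.protected and not force:
                    continue
                self[key] = value
        return dict(self)

    def keys(self):
        # The tests compare keys() against a plain list.
        return list(super().keys())

    def to_dict(self):
        return dict(self)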