commit 83b04e213300ef40c68e5efffa9ded8f069bba3b Author: shi Date: Sun Mar 24 23:42:27 2024 +0100 first commit diff --git a/README.md b/README.md new file mode 100644 index 0000000..af1405f --- /dev/null +++ b/README.md @@ -0,0 +1,29 @@ +# Inferring Human Intentions from Predicted Action Probabilities + +*Lei Shi, Paul Bürkner, Andreas Bulling* + +*University of Stuttgart, Stuttgart, Germany* + +Accepted by [Workshop on Theory of Mind in Human-AI Interaction at CHI 2024](https://theoryofmindinhaichi2024.wordpress.com/) + +## Requirements +The code is test in Ubuntu 20.04. + +``` +pytorch 1.11.0 +matplotlib 3.3.2 +pickle 4.0 +pandas 1.4.3 +R 4.2.1 +RStan 2.26.3 +``` +To install R, [see here](https://cran.r-project.org/bin/linux/ubuntu/fullREADME.html) + +To install RStan, [see here](https://mc-stan.org/users/interfaces/rstan.html) + +## Experiments + +To train and evaluate the method on Watch-And-Help dataset, see [here](watch_and_help/README.md) + +To train and evaluate the method on Keyboard and Mouse Interaction dataset, see [here](keyboard_and_mouse/README.md) + diff --git a/keyboard_and_mouse/README.MD b/keyboard_and_mouse/README.MD new file mode 100644 index 0000000..1dfe191 --- /dev/null +++ b/keyboard_and_mouse/README.MD @@ -0,0 +1,69 @@ +# Keyboard And Mouse Interactive Dataset + + +# Neural Network + +## Requirements +The code is test in Ubuntu 20.04. 
+ +pytorch 1.11.0 +matplotlib 3.3.2 +pickle 4.0 +pandas 1.4.3 + +## Train + +Set training parameters in train.sh + +Run `sh train.sh` to train the model + + +## Test + +Run `sh test.sh` to run test on trained model + +Predictions are saved under `prediction/task$i$/` + + +# Bayesian Inference + +## Requirements +R 4.2.1 +RStan [](https://mc-stan.org/users/interfaces/rstan.html) + + +Run `sh sampler_user.sh` to split prediction to 10% to 90% + +Run `Rscript stan/strategy_inference_test.R` to get results of intention prediction for all users +Run `sh stan/plot_user.sh` to plot the bar chart for user intention prediction results of all action sequences + +Run `Rscript stan/strategy_inference_test_full_length.R` to get results of intention prediction (0% to 100%) for all users +Run `sh stan/plot_user_length_10_steps.sh` to plot the bar chart for user intention prediction results (0% to 100%) of all action sequences + +Run `sh sampler_single_act.sh` to get the predictions for each individual action sequence. +Run `Rscript stan/strategy_inference_test_all_individual_act.R` to get all action sequences (0% to 100%) of all users for intention prediction +Run `sh plot_user_all_individual.sh` to plot the bar chart for user intention prediction results of all action sequences +Run `sh plot_user_length_10_steps_all_individual.sh` to plot the user intention prediction results (0% to 100%) of all action sequences + + + + +Set training and test parameters in train.sh and test.sh + +Run sh train.sh to train the model. + +Run sh test.sh to run test on trained model. 
+Predictions are saved under prediction/task$i$/ + +Run sh sampler_user.sh to split prediction to 10% to 90% + +Run stan/strategy_inference_test.R to get results of intention prediction for all users +Run stan/plot_user.py to plot the bar chart for user intention prediction results of all action sequences + +Run stan/strategy_inference_test_full_length.R to get results of intention prediction (0% to 100%) for all users +Run stan/plot_user_length_10_users.py to plot the bar chart for user intention prediction results (0% to 100%) of all action sequences + + +Run stan/strategy_inference_test_all_individual_act.R to get all action sequences (0% to 100%) of all users for intention prediction +Run stan/plot_user_all_individual.py to plot the bar chart for user intention prediction results of all action sequences +Run stan/plot_user_length_10_steps_all_individual.py to plot the user intention prediction results (0% to 100%) of all action sequences diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task0_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task0_checkpoint.ckpt new file mode 100644 index 0000000..820088a Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task0_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task0_model_best.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task0_model_best.ckpt new file mode 100644 index 0000000..820088a Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task0_model_best.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task1_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task1_checkpoint.ckpt new file mode 100644 index 0000000..38927b1 Binary files /dev/null and 
b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task1_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task1_model_best.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task1_model_best.ckpt new file mode 100644 index 0000000..d78d2ab Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task1_model_best.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task2_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task2_checkpoint.ckpt new file mode 100644 index 0000000..6c60b08 Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task2_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task2_model_best.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task2_model_best.ckpt new file mode 100644 index 0000000..50409ea Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task2_model_best.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task3_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task3_checkpoint.ckpt new file mode 100644 index 0000000..cb66b34 Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task3_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task3_model_best.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task3_model_best.ckpt new file mode 100644 index 0000000..cb66b34 Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task3_model_best.ckpt differ diff --git 
a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task4_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task4_checkpoint.ckpt new file mode 100644 index 0000000..482574b Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task4_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task4_model_best.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task4_model_best.ckpt new file mode 100644 index 0000000..482574b Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task4_model_best.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task5_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task5_checkpoint.ckpt new file mode 100644 index 0000000..6e7d3e6 Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task5_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task5_model_best.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task5_model_best.ckpt new file mode 100644 index 0000000..97f1bc0 Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task5_model_best.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task6_checkpoint.ckpt b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task6_checkpoint.ckpt new file mode 100644 index 0000000..9d64290 Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task6_checkpoint.ckpt differ diff --git a/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task6_model_best.ckpt 
b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task6_model_best.ckpt new file mode 100644 index 0000000..5a57e7e Binary files /dev/null and b/keyboard_and_mouse/checkpoints/lstmlast_bs_8_lr_0.0001_hidden_size_128/task6_model_best.ckpt differ diff --git a/keyboard_and_mouse/dataset/.ipynb_checkpoints/00-preprocessing-checkpoint.ipynb b/keyboard_and_mouse/dataset/.ipynb_checkpoints/00-preprocessing-checkpoint.ipynb new file mode 100644 index 0000000..9966e83 --- /dev/null +++ b/keyboard_and_mouse/dataset/.ipynb_checkpoints/00-preprocessing-checkpoint.ipynb @@ -0,0 +1,2043 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "44382f1b", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import glob, pdb, os" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "3f2c0190", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'Title': ['Alignment', 'Center', 'and', 'Underline'],\n", + " 'Subtitle': ['Underline'],\n", + " 'Paragraph': ['1', 'Indent']},\n", + " {'Title': ['Size', 'Big', 'and', 'Underline'],\n", + " 'Subtitle': ['Underline'],\n", + " 'Paragraph': ['Alignment', 'Right']},\n", + " {'Title': ['Bold', 'and', 'Underline'],\n", + " 'Subtitle': ['Bold'],\n", + " 'Paragraph': ['1', 'Indent']},\n", + " {'Title': ['1', 'Indent', 'and', 'Italic'],\n", + " 'Subtitle': ['1', 'Indent'],\n", + " 'Paragraph': ['Font', 'Family', 'Consolas']},\n", + " {'Title': ['Size', 'Big', 'and', 'Bold'],\n", + " 'Subtitle': ['Bold'],\n", + " 'Paragraph': ['Italic']},\n", + " {'Title': ['Size', 'Big'],\n", + " 'Subtitle': ['Bold'],\n", + " 'Paragraph': ['Font', 'Family', 'Consolas']},\n", + " {'Title': ['Font', 'Family', 'Consolas', 'and', 'Alignment', 'Center'],\n", + " 'Subtitle': ['Font', 'Family', 'Consolas'],\n", + " 'Paragraph': ['Italic']}]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + 
"source": [ + "study_data_path = '../IntentData/Python_data/'\n", + "javascript_path = \"../IntentData/Javascript_data/\"\n", + "javascript_files = [\"commands_data\", \"key_data\", \"selection_data\", \"toolbar_data\"]\n", + "sum_subj = 17 # Ignore subj 0: \"16 participants\" in the paper\n", + "\n", + "# Map 7 formatting rules to 7 labels\n", + "label_index = dict()\n", + "label_index[\"['', 'Title:', 'Alignment', 'Center', 'and', 'Underline\\\\n', 'Subtitle:', 'Underline\\\\n', 'Paragraph:', '1', 'Indent']\"] = 0\n", + "label_index[\"['', 'Title:', 'Size', 'Big', 'and', 'Underline\\\\n', 'Subtitle:', 'Underline\\\\n', 'Paragraph:', 'Alignment', 'Right']\"] = 1\n", + "label_index[\"['', 'Title:', 'Bold', 'and', 'Underline\\\\n', 'Subtitle:', 'Bold\\\\n', 'Paragraph:', '1', 'Indent']\"] = 2\n", + "label_index[\"['', 'Title:', '1', 'Indent', 'and', 'Italic\\\\n', 'Subtitle:', '1', 'Indent\\\\n', 'Paragraph:', 'Font', 'Family', 'Consolas']\"] = 3\n", + "label_index[\"['', 'Title:', 'Size', 'Big', 'and', 'Bold\\\\n', 'Subtitle:', 'Bold\\\\n', 'Paragraph:', 'Italic']\"] = 4\n", + "label_index[\"['', 'Title:', 'Size', 'Big\\\\n', 'Subtitle:', 'Bold\\\\n', 'Paragraph:', 'Font', 'Family', 'Consolas']\"] = 5\n", + "label_index[\"['', 'Title:', 'Font', 'Family', 'Consolas', 'and', 'Alignment', 'Center\\\\n', 'Subtitle:', 'Font', 'Family', 'Consolas\\\\n', 'Paragraph:', 'Italic']\"] = 6\n", + "# 7 formatting rules\n", + "formatting_rule = list()\n", + "formatting_rule.append({'Title': ['Alignment', 'Center', 'and', 'Underline'], 'Subtitle': ['Underline'], 'Paragraph': ['1', 'Indent']})\n", + "formatting_rule.append({'Title': ['Size', 'Big', 'and', 'Underline'], 'Subtitle': ['Underline'], 'Paragraph': ['Alignment', 'Right']})\n", + "formatting_rule.append({'Title': ['Bold', 'and', 'Underline'], 'Subtitle': ['Bold'], 'Paragraph': ['1', 'Indent']})\n", + "formatting_rule.append({'Title': ['1', 'Indent', 'and', 'Italic'], 'Subtitle': ['1', 'Indent'], 'Paragraph': ['Font', 
'Family', 'Consolas']})\n", + "formatting_rule.append({'Title': ['Size', 'Big', 'and', 'Bold'], 'Subtitle': ['Bold'], 'Paragraph': ['Italic']})\n", + "formatting_rule.append({'Title': ['Size', 'Big'], 'Subtitle': ['Bold'], 'Paragraph': ['Font', 'Family', 'Consolas']})\n", + "formatting_rule.append({'Title': ['Font', 'Family', 'Consolas', 'and', 'Alignment', 'Center'], 'Subtitle': ['Font', 'Family', 'Consolas'], 'Paragraph': ['Italic']})\n", + "formatting_rule" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c64ca45f", + "metadata": {}, + "outputs": [], + "source": [ + "def cleanEye(df, DISPLAY_X= 2560, DISPLAY_Y=1600):\n", + " df.fillna(0, inplace=True)\n", + " # Only keep samples where 0 0.0) & (df['Right_Gaze_X'] > 0.0) & (df['Left_Gaze_X'] < 1.0) & (df['Right_Gaze_X'] < 1.0) \n", + " & (df['Left_Gaze_Y'] > 0.0) & (df['Right_Gaze_Y'] > 0.0) & (df['Left_Gaze_Y'] < 1.0) & (df['Right_Gaze_Y'] < 1.0)]\n", + " # Mean(Left_Gaze_XorY,Right_Gaze_XorY)*DISPLAY_XorY\n", + " df.loc[:, \"Gaze_X\"] = (df.loc[:, 'Left_Gaze_X'] + df.loc[:, 'Right_Gaze_X'])/2 * DISPLAY_X\n", + " df.loc[:, \"Gaze_Y\"] = (df.loc[:, 'Left_Gaze_Y'] + df.loc[:, 'Right_Gaze_Y'])/2 * DISPLAY_Y\n", + " df = df.reset_index()\n", + " return df" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "695375fb", + "metadata": {}, + "outputs": [], + "source": [ + "def filter_start_end(data, df_starts, df_ends):\n", + " # Only keep data in the time segment\n", + " new_data = pd.DataFrame()\n", + " for start, end, condition, part, taskID in zip(df_starts.Timestamp, df_ends.Timestamp, df_starts.condition, df_starts.part, df_starts.taskID):\n", + " # use iloc here\n", + " tmp = data[ (data.Timestamp >= start) & (data.Timestamp <= end) ]\n", + " tmp[\"condition\"] = condition\n", + " tmp[\"part\"] = part\n", + " tmp[\"taskID\"] = taskID\n", + " new_data = new_data.append(tmp)\n", + " return new_data" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "2645b10d", 
+ "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " level_0 Timestamp event type PID key data range index \\\n", + "0 19 1575388379768 Tab commands 1 NaN NaN NaN NaN \n", + "1 20 1575388384967 Ctrl+I commands 1 NaN NaN NaN NaN \n", + "2 21 1575388388830 Tab commands 1 NaN NaN NaN NaN \n", + "3 22 1575388390782 Ctrl+Z commands 1 NaN NaN NaN NaN \n", + "4 23 1575388395487 Tab commands 1 NaN NaN NaN NaN \n", + "... ... ... ... ... ... ... ... ... ... \n", + "683124 69 1603897372746 NaN lorem 16 NaN NaN NaN NaN \n", + "683125 70 1603897429608 NaN lorem 16 NaN NaN NaN NaN \n", + "683126 71 1603897429849 NaN lorem 16 NaN NaN NaN NaN \n", + "683127 72 1603897491765 NaN lorem 16 NaN NaN NaN NaN \n", + "683128 73 1603897492016 NaN lorem 16 NaN NaN NaN NaN \n", + "\n", + " Left_Gaze_X ... clientY condition part taskID button \\\n", + "0 NaN ... NaN 1 1 0 NaN \n", + "1 NaN ... NaN 1 1 0 NaN \n", + "2 NaN ... NaN 1 1 0 NaN \n", + "3 NaN ... NaN 1 1 0 NaN \n", + "4 NaN ... NaN 1 1 0 NaN \n", + "... ... ... ... ... ... ... ... \n", + "683124 NaN ... NaN 1 5 4 NaN \n", + "683125 NaN ... NaN 1 5 5 NaN \n", + "683126 NaN ... NaN 1 5 5 NaN \n", + "683127 NaN ... NaN 1 5 6 NaN \n", + "683128 NaN ... NaN 1 5 6 NaN \n", + "\n", + " isCurrentlyPasting lorem_format \\\n", + "0 NaN NaN \n", + "1 NaN NaN \n", + "2 NaN NaN \n", + "3 NaN NaN \n", + "4 NaN NaN \n", + "... ... ... \n", + "683124 False None \n", + "683125 True Quaerat et... \n", + "683126 False None \n", + "683127 True Etincidunt... \n", + "683128 False None \n", + "\n", + " lorem_text \\\n", + "0 NaN \n", + "1 NaN \n", + "2 NaN \n", + "3 NaN \n", + "4 NaN \n", + "... ... \n", + "683124 None \n", + "683125 Quaerat etincidunt tempora. Sit quaerat adipis... \n", + "683126 None \n", + "683127 Etincidunt modi. Sed non etincidunt magnam qui... \n", + "683128 None \n", + "\n", + " rules label \n", + "0 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 
3 \n", + "1 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "2 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "3 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "4 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "... ... ... \n", + "683124 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "683125 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "683126 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "683127 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "683128 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "\n", + "[10119178 rows x 28 columns]\n" + ] + } + ], + "source": [ + "if os.path.exists(\"../ReRun/filtered_data.pkl\"):\n", + " result = pd.read_pickle(\"../ReRun/filtered_data.pkl\")\n", + "else:\n", + " result = None\n", + " for subj in range(1,sum_subj):\n", + " print(\"subj:\",subj)\n", + " procedure_file = glob.glob(study_data_path+\"procedure_%d_*.csv\"%(subj))\n", + " lorem_file = glob.glob(study_data_path+\"lorem_text_%d_*.csv\"%(subj))\n", + " eye_file = glob.glob(study_data_path+\"eyedata_%d_*.csv\"%(subj))\n", + " mouse_pos_file = glob.glob(study_data_path+\"mouse_pos_%d_*.csv\"%(subj))\n", + " mouse_click_file = glob.glob(study_data_path+\"mouse_click_%d_*.csv\"%(subj))\n", + "\n", + " # Actually none of them contains multiple csv files\n", + " assert len(procedure_file)==1\n", + " assert len(lorem_file)==1\n", + " assert len(eye_file)==1\n", + " assert len(mouse_pos_file)==1\n", + " assert len(mouse_click_file)==1\n", + "\n", + " # Load dataframe from .csv\n", + " df_p = pd.read_csv(procedure_file[-1])\n", + " df_l = pd.read_csv(lorem_file[-1])\n", + " df_e = pd.read_csv(eye_file[-1])\n", + " df_mouse_pos = pd.read_csv(mouse_pos_file[-1])\n", + " df_mouse_click = pd.read_csv(mouse_click_file[-1])\n", + "\n", + " ''''print(\"df_p:\",df_p)\n", + " print(\"df_l:\",df_l)\n", + " print(\"df_e:\",df_e)\n", + " print(\"df_mouse_pos:\",df_mouse_pos)\n", + " 
print(\"df_mouse_click:\",df_mouse_click)'''\n", + "\n", + " # Calculate timestamp\n", + " #print(\"df_p.Timestamp BEF:\",df_p.Timestamp)\n", + " df_p.Timestamp = (df_p.Timestamp * 1000).astype(int)\n", + " df_l.Timestamp = (df_l.Timestamp * 1000).astype(int)\n", + " df_e.Timestamp = (df_e.Timestamp * 1000).astype(int)\n", + " df_mouse_pos.Timestamp = (df_mouse_pos.Timestamp * 1000).astype(int)\n", + " df_mouse_click.Timestamp = (df_mouse_click.Timestamp * 1000).astype(int)\n", + " #print(\"df_p.Timestamp AFT:\",df_p.Timestamp)\n", + "\n", + " # ??? For lorem_text and procedure, only take data where condition==1\n", + " df_l_1 = df_l[df_l.condition == 1]\n", + " df_p_1 = df_p[df_p.condition == 1]\n", + " ''''print(\"df_l_1:\\n\",df_l_1)\n", + " print(\"df_p_1:\\n\",df_p_1)'''\n", + "\n", + " # Start-End-Filter (condition 1)\n", + " ## ????? WHY?\n", + " df_starts = df_l_1[(df_l_1.isCurrentlyPasting == False) & (df_l_1.isStudy == True)]\n", + " #print(\"df_starts:\\n\",df_starts)\n", + " ## Only keep PID, Timestamp, condition, part and taskID\n", + " df_starts = df_starts.drop([\"isCurrentlyPasting\", \"isStudy\", \"lorem_format\", \"lorem_text\", \"rules\"], axis = 1)\n", + " #print(\"df_starts:\\n\",df_starts)\n", + "\n", + " ## ????? 
WHY?\n", + " df_ends = df_p_1[(df_p_1.isEscapePressed == True) & (df_p_1.isStudy == True) & (df_p_1.isOverlayDisplayed == False)]\n", + " #print(\"df_ends:\\n\",df_ends)\n", + " ## Only keep PID, Timestamp, condition, part and taskID(can be -1??????)\n", + " df_ends = df_ends.drop([\"isEscapePressed\", \"isStudy\", \"isOverlayDisplayed\"], axis = 1)\n", + " #print(\"df_ends:\\n\",df_ends)\n", + "\n", + " ## Add a new column of ID starting from 0 consecutively\n", + " df_starts = df_starts.reset_index()\n", + " df_ends = df_ends.reset_index()\n", + " ''''print(\"df_starts:\\n\",df_starts)\n", + " print(\"df_ends:\\n\",df_ends)'''\n", + "\n", + " # Get integer labels (0~6) from formatting rule strings\n", + " labels = pd.DataFrame(df_l_1[(df_l_1.rules != \"None\") & (df_l_1.isStudy == True)].rules)\n", + " labels = labels.apply(lambda x: label_index[x[0]], axis=1)\n", + " #print(\"labels:\\n\",labels)\n", + "\n", + " # Read javascript files\n", + " unfiltered_data = pd.DataFrame()\n", + " for javascript_file in javascript_files:\n", + " file = glob.glob(javascript_path + javascript_file + \"_%d.csv\"%(subj))\n", + " assert len(file)==1\n", + " data = pd.read_csv(file[-1])\n", + " print(file[-1])\n", + " ''''for key in data.keys():\n", + " if key=='Timestamp':\n", + " continue\n", + " print(key, pd.unique(data[key]))'''\n", + " # Remove first empty line done by lizard\n", + " data = data.iloc[1:]\n", + " # Add two columns: type = \"commands\"/\"key\"/\"selection\"/\"toolbar\"; PID = subj\n", + " data['type'] = javascript_file.replace(\"_data\", \"\")\n", + " data['PID'] = subj\n", + " # Concatenate to \"unfiltered_data\"\n", + " unfiltered_data = pd.concat([unfiltered_data, data], sort=False)\n", + "\n", + " # Add two columns to eye data\n", + " df_e['type'] = \"eye\"\n", + " df_e['PID'] = subj\n", + " # Remove invalid data and convert from (0,1) to real pixels\n", + " df_e = cleanEye(df_e)\n", + "\n", + " unfiltered_data = pd.concat([unfiltered_data, df_e], 
sort=False)\n", + " df_mouse_pos[\"type\"] = \"pos\"\n", + " unfiltered_data = pd.concat([unfiltered_data, df_mouse_pos], sort=False)\n", + " df_mouse_click[\"type\"] = \"click\"\n", + " unfiltered_data = pd.concat([unfiltered_data, df_mouse_click], sort=False)\n", + "\n", + " # filter according to rulesets\n", + " ## add 300ms to prevent pasting to be included??????\n", + " df_starts.Timestamp += 300\n", + " ## Only keep the data inside the time segments\n", + " filtered_data = filter_start_end(unfiltered_data, df_starts, df_ends)\n", + " df_starts['type'] = \"start\"\n", + " df_starts = df_starts.drop([\"index\"], axis = 1)\n", + " filtered_data = pd.concat([filtered_data, df_starts], sort=False)\n", + "\n", + " df_ends['type'] = \"end\"\n", + " df_ends.taskID = df_ends.taskID.replace(-1, 7)\n", + " df_ends.taskID -= 1\n", + " df_ends = df_ends.drop([\"index\"], axis = 1)\n", + " filtered_data = pd.concat([filtered_data, df_ends], sort=False)\n", + "\n", + " df_l_1['type'] = \"lorem\"\n", + " filtered_data = pd.concat([filtered_data, df_l_1], sort=False)\n", + "\n", + " # remove tutorial part\n", + " filtered_data = filtered_data[(filtered_data.part != 0)]\n", + "\n", + " # add labels???????\n", + " filtered_data[\"label\"] = filtered_data.apply(lambda x: labels.iloc[[(7 * (x.part - 1)) + x.taskID]].values[0], axis=1)\n", + "\n", + " # convert rules back to list\n", + " #filtered_data[\"rules\"] = filtered_data[\"rules\"].apply(lambda x: x.strip('][').split(', ')[1:] if isinstance(x, str) else None)\n", + " # this one overwrites everything, based on labels\n", + " filtered_data[\"rules\"] = filtered_data[\"label\"].apply(lambda x: formatting_rule[x])\n", + "\n", + " filtered_data = filtered_data.drop([\"isStudy\"], axis = 1).reset_index()\n", + "\n", + " # combine for all participants\n", + " #pdb.set_trace()\n", + " if result is None:\n", + " result = filtered_data\n", + " else:\n", + " result = pd.concat([result, filtered_data], sort=False)\n", + "\n", + " 
result.to_pickle(\"../ReRun/filtered_data.pkl\")\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "id": "3d762353", + "metadata": {}, + "source": [ + "## Compare the generated and saved filtered_data" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "b15a52be", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(array([ 139, 139, 139, ..., 10116492, 10116493, 10116494]),\n", + " array([10, 12, 16, ..., 16, 10, 9]))" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Check if the result here is the same as that saved by Jan\n", + "result = pd.read_pickle(\"../ReRun/filtered_data.pkl\")\n", + "pre_result = pd.read_pickle(\"../IntentData/Preprocessing_data/filtered_data.pkl\")\n", + "\n", + "result.fillna(0, inplace=True)\n", + "pre_result.fillna(0, inplace=True)\n", + "\n", + "idx = np.where(pre_result!=result)\n", + "idx" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "id": "ef3326f1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 1 9 10 11 12 15 16]\n", + " Timestamp Timestamp Timestamp Timestamp \\\n", + "9805 1575388411623 1575388411623 1575388411623 1575388411623 \n", + "56789 1575388567787 1575388567787 1575388567787 1575388567787 \n", + "69831 1575388607232 1575388607232 1575388607232 1575388607232 \n", + "72534 1575388616827 1575388616827 1575388616827 1575388616827 \n", + "78669 1575388639022 1575388639022 1575388639022 1575388639022 \n", + "... ... ... ... ... 
\n", + "635390 1603897392982 1603897392982 1603897392982 1603897392982 \n", + "648780 1603897436218 1603897436218 1603897436218 1603897436218 \n", + "660707 1603897478712 1603897478712 1603897478712 1603897478712 \n", + "661641 1603897482847 1603897482847 1603897482847 1603897482847 \n", + "672132 1603897511607 1603897511607 1603897511607 1603897511607 \n", + "\n", + " Timestamp Timestamp Timestamp Timestamp \\\n", + "9805 1575388411623 1575388411623 1575388411623 1575388411623 \n", + "56789 1575388567787 1575388567787 1575388567787 1575388567787 \n", + "69831 1575388607232 1575388607232 1575388607232 1575388607232 \n", + "72534 1575388616827 1575388616827 1575388616827 1575388616827 \n", + "78669 1575388639022 1575388639022 1575388639022 1575388639022 \n", + "... ... ... ... ... \n", + "635390 1603897392982 1603897392982 1603897392982 1603897392982 \n", + "648780 1603897436218 1603897436218 1603897436218 1603897436218 \n", + "660707 1603897478712 1603897478712 1603897478712 1603897478712 \n", + "661641 1603897482847 1603897482847 1603897482847 1603897482847 \n", + "672132 1603897511607 1603897511607 1603897511607 1603897511607 \n", + "\n", + " Timestamp Timestamp ... Timestamp Timestamp \\\n", + "9805 1575388411623 1575388411623 ... 1575388411623 1575388411623 \n", + "56789 1575388567787 1575388567787 ... 1575388567787 1575388567787 \n", + "69831 1575388607232 1575388607232 ... 1575388607232 1575388607232 \n", + "72534 1575388616827 1575388616827 ... 1575388616827 1575388616827 \n", + "78669 1575388639022 1575388639022 ... 1575388639022 1575388639022 \n", + "... ... ... ... ... ... \n", + "635390 1603897392982 1603897392982 ... 1603897392982 1603897392982 \n", + "648780 1603897436218 1603897436218 ... 1603897436218 1603897436218 \n", + "660707 1603897478712 1603897478712 ... 1603897478712 1603897478712 \n", + "661641 1603897482847 1603897482847 ... 1603897482847 1603897482847 \n", + "672132 1603897511607 1603897511607 ... 
1603897511607 1603897511607 \n", + "\n", + " Timestamp Timestamp Timestamp Timestamp \\\n", + "9805 1575388411623 1575388411623 1575388411623 1575388411623 \n", + "56789 1575388567787 1575388567787 1575388567787 1575388567787 \n", + "69831 1575388607232 1575388607232 1575388607232 1575388607232 \n", + "72534 1575388616827 1575388616827 1575388616827 1575388616827 \n", + "78669 1575388639022 1575388639022 1575388639022 1575388639022 \n", + "... ... ... ... ... \n", + "635390 1603897392982 1603897392982 1603897392982 1603897392982 \n", + "648780 1603897436218 1603897436218 1603897436218 1603897436218 \n", + "660707 1603897478712 1603897478712 1603897478712 1603897478712 \n", + "661641 1603897482847 1603897482847 1603897482847 1603897482847 \n", + "672132 1603897511607 1603897511607 1603897511607 1603897511607 \n", + "\n", + " Timestamp Timestamp Timestamp Timestamp \n", + "9805 1575388411623 1575388411623 1575388411623 1575388411623 \n", + "56789 1575388567787 1575388567787 1575388567787 1575388567787 \n", + "69831 1575388607232 1575388607232 1575388607232 1575388607232 \n", + "72534 1575388616827 1575388616827 1575388616827 1575388616827 \n", + "78669 1575388639022 1575388639022 1575388639022 1575388639022 \n", + "... ... ... ... ... 
\n", + "635390 1603897392982 1603897392982 1603897392982 1603897392982 \n", + "648780 1603897436218 1603897436218 1603897436218 1603897436218 \n", + "660707 1603897478712 1603897478712 1603897478712 1603897478712 \n", + "661641 1603897482847 1603897482847 1603897482847 1603897482847 \n", + "672132 1603897511607 1603897511607 1603897511607 1603897511607 \n", + "\n", + "[1213 rows x 1213 columns] Timestamp Timestamp Timestamp Timestamp \\\n", + "9805 1575388411624 1575388411624 1575388411624 1575388411624 \n", + "56789 1575388567788 1575388567788 1575388567788 1575388567788 \n", + "69831 1575388607233 1575388607233 1575388607233 1575388607233 \n", + "72534 1575388616828 1575388616828 1575388616828 1575388616828 \n", + "78669 1575388639023 1575388639023 1575388639023 1575388639023 \n", + "... ... ... ... ... \n", + "635390 1603897392983 1603897392983 1603897392983 1603897392983 \n", + "648780 1603897436219 1603897436219 1603897436219 1603897436219 \n", + "660707 1603897478713 1603897478713 1603897478713 1603897478713 \n", + "661641 1603897482848 1603897482848 1603897482848 1603897482848 \n", + "672132 1603897511608 1603897511608 1603897511608 1603897511608 \n", + "\n", + " Timestamp Timestamp Timestamp Timestamp \\\n", + "9805 1575388411624 1575388411624 1575388411624 1575388411624 \n", + "56789 1575388567788 1575388567788 1575388567788 1575388567788 \n", + "69831 1575388607233 1575388607233 1575388607233 1575388607233 \n", + "72534 1575388616828 1575388616828 1575388616828 1575388616828 \n", + "78669 1575388639023 1575388639023 1575388639023 1575388639023 \n", + "... ... ... ... ... 
\n", + "635390 1603897392983 1603897392983 1603897392983 1603897392983 \n", + "648780 1603897436219 1603897436219 1603897436219 1603897436219 \n", + "660707 1603897478713 1603897478713 1603897478713 1603897478713 \n", + "661641 1603897482848 1603897482848 1603897482848 1603897482848 \n", + "672132 1603897511608 1603897511608 1603897511608 1603897511608 \n", + "\n", + " Timestamp Timestamp ... Timestamp Timestamp \\\n", + "9805 1575388411624 1575388411624 ... 1575388411624 1575388411624 \n", + "56789 1575388567788 1575388567788 ... 1575388567788 1575388567788 \n", + "69831 1575388607233 1575388607233 ... 1575388607233 1575388607233 \n", + "72534 1575388616828 1575388616828 ... 1575388616828 1575388616828 \n", + "78669 1575388639023 1575388639023 ... 1575388639023 1575388639023 \n", + "... ... ... ... ... ... \n", + "635390 1603897392983 1603897392983 ... 1603897392983 1603897392983 \n", + "648780 1603897436219 1603897436219 ... 1603897436219 1603897436219 \n", + "660707 1603897478713 1603897478713 ... 1603897478713 1603897478713 \n", + "661641 1603897482848 1603897482848 ... 1603897482848 1603897482848 \n", + "672132 1603897511608 1603897511608 ... 1603897511608 1603897511608 \n", + "\n", + " Timestamp Timestamp Timestamp Timestamp \\\n", + "9805 1575388411624 1575388411624 1575388411624 1575388411624 \n", + "56789 1575388567788 1575388567788 1575388567788 1575388567788 \n", + "69831 1575388607233 1575388607233 1575388607233 1575388607233 \n", + "72534 1575388616828 1575388616828 1575388616828 1575388616828 \n", + "78669 1575388639023 1575388639023 1575388639023 1575388639023 \n", + "... ... ... ... ... 
\n", + "635390 1603897392983 1603897392983 1603897392983 1603897392983 \n", + "648780 1603897436219 1603897436219 1603897436219 1603897436219 \n", + "660707 1603897478713 1603897478713 1603897478713 1603897478713 \n", + "661641 1603897482848 1603897482848 1603897482848 1603897482848 \n", + "672132 1603897511608 1603897511608 1603897511608 1603897511608 \n", + "\n", + " Timestamp Timestamp Timestamp Timestamp \n", + "9805 1575388411624 1575388411624 1575388411624 1575388411624 \n", + "56789 1575388567788 1575388567788 1575388567788 1575388567788 \n", + "69831 1575388607233 1575388607233 1575388607233 1575388607233 \n", + "72534 1575388616828 1575388616828 1575388616828 1575388616828 \n", + "78669 1575388639023 1575388639023 1575388639023 1575388639023 \n", + "... ... ... ... ... \n", + "635390 1603897392983 1603897392983 1603897392983 1603897392983 \n", + "648780 1603897436219 1603897436219 1603897436219 1603897436219 \n", + "660707 1603897478713 1603897478713 1603897478713 1603897478713 \n", + "661641 1603897482848 1603897482848 1603897482848 1603897482848 \n", + "672132 1603897511608 1603897511608 1603897511608 1603897511608 \n", + "\n", + "[1213 rows x 1213 columns]\n" + ] + } + ], + "source": [ + "# columns that can be different\n", + "print(np.unique(idx[1]))\n", + "result.columns[np.unique(idx[1])]\n", + "np.where(idx[1]==1)[0]\n", + "print(pre_result.iloc[idx[0][np.where(idx[1]==1)[0]],idx[1][np.where(idx[1]==1)[0]]],result.iloc[idx[0][np.where(idx[1]==1)[0]],idx[1][np.where(idx[1]==1)[0]]])" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "ad397903", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "139 5.551115e-17 5.551115e-17 1.136868e-13\n", + "139 5.551115e-17 5.551115e-17 1.136868e-13\n", + "139 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y\n", + "140 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "141 1.110223e-16 
5.551115e-17 1.136868e-13\n", + "141 1.110223e-16 5.551115e-17 1.136868e-13\n", + "141 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Gaze_Y\n", + "142 2.775558e-17 1.110223e-16 5.684342e-14\n", + "142 2.775558e-17 1.110223e-16 5.684342e-14\n", + "142 2.775558e-17 1.110223e-16 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "144 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "144 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "144 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "144 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "144 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X\n", + "145 5.551115e-17 5.551115e-17\n", + "145 5.551115e-17 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "146 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "146 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "146 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "146 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "146 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "147 5.551115e-17 8.326673e-17 1.136868e-13\n", + "147 5.551115e-17 8.326673e-17 1.136868e-13\n", + "147 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "148 1.110223e-16 1.110223e-16 1.136868e-13\n", + "148 1.110223e-16 1.110223e-16 1.136868e-13\n", + "148 1.110223e-16 1.110223e-16 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "149 5.551115e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + "149 5.551115e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + "149 5.551115e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + "149 5.551115e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + " 
Left_Gaze_X\n", + "150 5.551115e-17\n", + " Left_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "151 5.551115e-17 8.326673e-17 2.273737e-13 5.684342e-14\n", + "151 5.551115e-17 8.326673e-17 2.273737e-13 5.684342e-14\n", + "151 5.551115e-17 8.326673e-17 2.273737e-13 5.684342e-14\n", + "151 5.551115e-17 8.326673e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_Y Gaze_Y\n", + "152 8.326673e-17 1.136868e-13\n", + "152 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "154 5.551115e-17 5.551115e-17 1.136868e-13\n", + "154 5.551115e-17 5.551115e-17 1.136868e-13\n", + "154 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Right_Gaze_X\n", + "155 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "156 8.326673e-17 -5.551115e-17 -2.775558e-17 5.684342e-14\n", + "156 8.326673e-17 -5.551115e-17 -2.775558e-17 5.684342e-14\n", + "156 8.326673e-17 -5.551115e-17 -2.775558e-17 5.684342e-14\n", + "156 8.326673e-17 -5.551115e-17 -2.775558e-17 5.684342e-14\n", + " Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "157 1.110223e-16 8.326673e-17 2.273737e-13 1.136868e-13\n", + "157 1.110223e-16 8.326673e-17 2.273737e-13 1.136868e-13\n", + "157 1.110223e-16 8.326673e-17 2.273737e-13 1.136868e-13\n", + "157 1.110223e-16 8.326673e-17 2.273737e-13 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "158 8.326673e-17 8.326673e-17 1.136868e-13\n", + "158 8.326673e-17 8.326673e-17 1.136868e-13\n", + "158 8.326673e-17 8.326673e-17 1.136868e-13\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "159 1.110223e-16 5.551115e-17 5.684342e-14\n", + "159 1.110223e-16 5.551115e-17 5.684342e-14\n", + "159 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Right_Gaze_Y\n", + "160 2.775558e-17\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "161 -5.551115e-17 1.110223e-16 1.136868e-13\n", + "161 -5.551115e-17 1.110223e-16 1.136868e-13\n", + "161 -5.551115e-17 1.110223e-16 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "162 8.326673e-17 5.551115e-17 1.136868e-13\n", + 
"162 8.326673e-17 5.551115e-17 1.136868e-13\n", + "162 8.326673e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y\n", + "164 5.551115e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "165 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "165 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "165 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "165 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "166 1.110223e-16 1.136868e-13\n", + "166 1.110223e-16 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "167 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "167 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "167 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "167 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "167 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "167 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "167 1.136868e-13 \n", + "167 1.136868e-13 \n", + "167 1.136868e-13 \n", + "167 1.136868e-13 \n", + "167 1.136868e-13 \n", + "167 1.136868e-13 \n", + " Left_Gaze_X Right_Gaze_Y Gaze_Y\n", + "168 5.551115e-17 5.551115e-17 5.684342e-14\n", + "168 5.551115e-17 5.551115e-17 5.684342e-14\n", + "168 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y\n", + "169 1.110223e-16 2.775558e-17\n", + "169 1.110223e-16 2.775558e-17\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "171 5.551115e-17 5.551115e-17 1.136868e-13\n", + "171 5.551115e-17 5.551115e-17 1.136868e-13\n", + "171 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Right_Gaze_Y\n", + "172 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "173 5.551115e-17 5.551115e-17 1.136868e-13\n", + "173 5.551115e-17 5.551115e-17 1.136868e-13\n", + "173 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X 
Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "174 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "174 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "174 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "174 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "174 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_Y Gaze_Y\n", + "175 8.326673e-17 5.684342e-14\n", + "175 8.326673e-17 5.684342e-14\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "176 5.551115e-17 5.551115e-17 5.684342e-14\n", + "176 5.551115e-17 5.551115e-17 5.684342e-14\n", + "176 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "177 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 1.705303e-13\n", + "177 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 1.705303e-13\n", + "177 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 1.705303e-13\n", + "177 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 1.705303e-13\n", + "177 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 1.705303e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "178 1.110223e-16 5.551115e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "178 1.110223e-16 5.551115e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "178 1.110223e-16 5.551115e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "178 1.110223e-16 5.551115e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "178 1.110223e-16 5.551115e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "178 1.110223e-16 5.551115e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "\n", + " Gaze_Y \n", + "178 1.136868e-13 \n", + "178 1.136868e-13 \n", + "178 1.136868e-13 \n", + "178 1.136868e-13 \n", + "178 1.136868e-13 \n", + "178 1.136868e-13 \n", + " Right_Gaze_Y Gaze_Y\n", + "179 8.326673e-17 1.136868e-13\n", + "179 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", 
+ "180 1.110223e-16 5.551115e-17 5.684342e-14\n", + "180 1.110223e-16 5.551115e-17 5.684342e-14\n", + "180 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X\n", + "181 5.551115e-17 -2.775558e-17 1.110223e-16 2.273737e-13\n", + "181 5.551115e-17 -2.775558e-17 1.110223e-16 2.273737e-13\n", + "181 5.551115e-17 -2.775558e-17 1.110223e-16 2.273737e-13\n", + "181 5.551115e-17 -2.775558e-17 1.110223e-16 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "182 1.110223e-16 5.551115e-17 2.775558e-17 1.136868e-13\n", + "182 1.110223e-16 5.551115e-17 2.775558e-17 1.136868e-13\n", + "182 1.110223e-16 5.551115e-17 2.775558e-17 1.136868e-13\n", + "182 1.110223e-16 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "183 5.551115e-17 5.684342e-14\n", + "183 5.551115e-17 5.684342e-14\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "184 -5.551115e-17 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "184 -5.551115e-17 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "184 -5.551115e-17 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "184 -5.551115e-17 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "184 -5.551115e-17 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Gaze_Y\n", + "185 5.551115e-17 5.684342e-14\n", + "185 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "186 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "186 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "186 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "186 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "186 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "186 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 
2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "186 1.136868e-13 \n", + "186 1.136868e-13 \n", + "186 1.136868e-13 \n", + "186 1.136868e-13 \n", + "186 1.136868e-13 \n", + "186 1.136868e-13 \n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "187 1.110223e-16 8.326673e-17 1.136868e-13\n", + "187 1.110223e-16 8.326673e-17 1.136868e-13\n", + "187 1.110223e-16 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Gaze_Y\n", + "188 5.551115e-17 5.684342e-14\n", + "188 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "189 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "189 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "189 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "189 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "190 1.110223e-16 5.551115e-17 5.684342e-14\n", + "190 1.110223e-16 5.551115e-17 5.684342e-14\n", + "190 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "191 1.110223e-16 2.775558e-17 2.775558e-17 5.684342e-14\n", + "191 1.110223e-16 2.775558e-17 2.775558e-17 5.684342e-14\n", + "191 1.110223e-16 2.775558e-17 2.775558e-17 5.684342e-14\n", + "191 1.110223e-16 2.775558e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "192 5.551115e-17 2.775558e-17 1.136868e-13\n", + "192 5.551115e-17 2.775558e-17 1.136868e-13\n", + "192 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_Y\n", + "193 1.110223e-16 2.775558e-17\n", + "193 1.110223e-16 2.775558e-17\n", + " Left_Gaze_Y\n", + "194 2.775558e-17\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X\n", + "195 1.110223e-16 5.551115e-17 2.775558e-17 2.273737e-13\n", + "195 1.110223e-16 5.551115e-17 2.775558e-17 2.273737e-13\n", + "195 1.110223e-16 5.551115e-17 2.775558e-17 2.273737e-13\n", + "195 1.110223e-16 5.551115e-17 2.775558e-17 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X\n", + "196 1.110223e-16 
5.551115e-17 5.551115e-17 2.273737e-13\n", + "196 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13\n", + "196 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13\n", + "196 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "197 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "197 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "197 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "197 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "198 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "198 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "198 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "198 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "198 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "198 5.551115e-17 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "198 5.684342e-14 \n", + "198 5.684342e-14 \n", + "198 5.684342e-14 \n", + "198 5.684342e-14 \n", + "198 5.684342e-14 \n", + "198 5.684342e-14 \n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "199 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "199 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "199 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "199 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "200 5.551115e-17 5.684342e-14\n", + "200 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X\n", + "201 5.551115e-17 5.551115e-17\n", + "201 5.551115e-17 5.551115e-17\n", + " Left_Gaze_Y Gaze_Y\n", + "202 5.551115e-17 5.684342e-14\n", + "202 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "204 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "204 1.110223e-16 
5.551115e-17 5.551115e-17 1.136868e-13\n", + "204 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "204 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "205 2.775558e-17 5.551115e-17 1.136868e-13\n", + "205 2.775558e-17 5.551115e-17 1.136868e-13\n", + "205 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "206 5.551115e-17 2.775558e-17 1.136868e-13\n", + "206 5.551115e-17 2.775558e-17 1.136868e-13\n", + "206 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "207 1.110223e-16 5.551115e-17 1.110223e-16 1.705303e-13\n", + "207 1.110223e-16 5.551115e-17 1.110223e-16 1.705303e-13\n", + "207 1.110223e-16 5.551115e-17 1.110223e-16 1.705303e-13\n", + "207 1.110223e-16 5.551115e-17 1.110223e-16 1.705303e-13\n", + " Left_Gaze_Y Right_Gaze_X Gaze_Y\n", + "208 5.551115e-17 5.551115e-17 5.684342e-14\n", + "208 5.551115e-17 5.551115e-17 5.684342e-14\n", + "208 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Right_Gaze_X Right_Gaze_Y Gaze_X\n", + "209 1.110223e-16 5.551115e-17 2.273737e-13\n", + "209 1.110223e-16 5.551115e-17 2.273737e-13\n", + "209 1.110223e-16 5.551115e-17 2.273737e-13\n", + " Left_Gaze_Y Right_Gaze_X Gaze_Y\n", + "210 8.326673e-17 1.110223e-16 1.136868e-13\n", + "210 8.326673e-17 1.110223e-16 1.136868e-13\n", + "210 8.326673e-17 1.110223e-16 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "211 5.551115e-17 5.551115e-17 1.136868e-13\n", + "211 5.551115e-17 5.551115e-17 1.136868e-13\n", + "211 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "212 2.775558e-17 5.551115e-17 5.684342e-14\n", + "212 2.775558e-17 5.551115e-17 5.684342e-14\n", + "212 2.775558e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "213 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "213 5.551115e-17 2.775558e-17 1.110223e-16 
2.775558e-17 2.273737e-13 \n", + "213 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "213 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "213 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "213 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "213 5.684342e-14 \n", + "213 5.684342e-14 \n", + "213 5.684342e-14 \n", + "213 5.684342e-14 \n", + "213 5.684342e-14 \n", + "213 5.684342e-14 \n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "214 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "214 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "214 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "214 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y\n", + "215 5.551115e-17 5.551115e-17\n", + "215 5.551115e-17 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_Y\n", + "216 -2.775558e-17 5.551115e-17\n", + "216 -2.775558e-17 5.551115e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "217 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "217 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "217 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "217 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "217 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13 5.684342e-14\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "218 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 1.136868e-13\n", + "218 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 1.136868e-13\n", + "218 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 1.136868e-13\n", + "218 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 1.136868e-13\n", + "218 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13 1.136868e-13\n", + " Left_Gaze_Y 
Right_Gaze_Y Gaze_Y\n", + "219 5.551115e-17 5.551115e-17 1.136868e-13\n", + "219 5.551115e-17 5.551115e-17 1.136868e-13\n", + "219 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X\n", + "220 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13\n", + "220 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13\n", + "220 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13\n", + "220 5.551115e-17 2.775558e-17 1.110223e-16 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "222 -5.551115e-17 2.775558e-17 5.684342e-14\n", + "222 -5.551115e-17 2.775558e-17 5.684342e-14\n", + "222 -5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "223 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "223 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "223 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "223 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + "223 1.110223e-16 5.551115e-17 1.110223e-16 4.547474e-13 5.684342e-14\n", + " Right_Gaze_Y Gaze_Y\n", + "224 -2.775558e-17 -5.684342e-14\n", + "224 -2.775558e-17 -5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "225 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + "225 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + "225 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + "225 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "226 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "226 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "226 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "226 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "226 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "226 1.110223e-16 
2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "\n", + " Gaze_Y \n", + "226 5.684342e-14 \n", + "226 5.684342e-14 \n", + "226 5.684342e-14 \n", + "226 5.684342e-14 \n", + "226 5.684342e-14 \n", + "226 5.684342e-14 \n", + " Left_Gaze_Y Gaze_Y\n", + "227 2.775558e-17 5.684342e-14\n", + "227 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "228 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "228 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "228 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "228 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X\n", + "229 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "230 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "230 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "230 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "230 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y\n", + "231 5.551115e-17 2.775558e-17 -2.775558e-17\n", + "231 5.551115e-17 2.775558e-17 -2.775558e-17\n", + "231 5.551115e-17 2.775558e-17 -2.775558e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "232 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "232 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "232 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "232 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "233 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "233 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "233 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "233 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "234 2.775558e-17 2.775558e-17 5.684342e-14\n", + "234 2.775558e-17 2.775558e-17 5.684342e-14\n", + "234 2.775558e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y 
Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "235 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "235 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "235 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "235 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "236 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "236 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "236 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "236 5.551115e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "237 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "237 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "237 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "237 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "238 5.551115e-17 2.775558e-17 5.684342e-14\n", + "238 5.551115e-17 2.775558e-17 5.684342e-14\n", + "238 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "239 -5.551115e-17 5.551115e-17 5.684342e-14\n", + "239 -5.551115e-17 5.551115e-17 5.684342e-14\n", + "239 -5.551115e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Gaze_Y\n", + "240 2.775558e-17 5.684342e-14\n", + "240 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "241 8.326673e-17 5.551115e-17 1.136868e-13\n", + "241 8.326673e-17 5.551115e-17 1.136868e-13\n", + "241 8.326673e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X\n", + "242 2.775558e-17 1.110223e-16\n", + "242 2.775558e-17 1.110223e-16\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "243 1.110223e-16 1.110223e-16 5.551115e-17 4.547474e-13 5.684342e-14\n", + "243 1.110223e-16 1.110223e-16 5.551115e-17 4.547474e-13 5.684342e-14\n", + "243 1.110223e-16 1.110223e-16 5.551115e-17 4.547474e-13 5.684342e-14\n", + "243 1.110223e-16 
1.110223e-16 5.551115e-17 4.547474e-13 5.684342e-14\n", + "243 1.110223e-16 1.110223e-16 5.551115e-17 4.547474e-13 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "244 5.551115e-17 2.775558e-17 1.136868e-13\n", + "244 5.551115e-17 2.775558e-17 1.136868e-13\n", + "244 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_Y Gaze_Y\n", + "245 5.551115e-17 8.326673e-17 1.136868e-13\n", + "245 5.551115e-17 8.326673e-17 1.136868e-13\n", + "245 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_Y Gaze_Y\n", + "246 5.551115e-17 5.551115e-17 5.684342e-14\n", + "246 5.551115e-17 5.551115e-17 5.684342e-14\n", + "246 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "247 5.551115e-17 8.326673e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "247 5.551115e-17 8.326673e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "247 5.551115e-17 8.326673e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "247 5.551115e-17 8.326673e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "247 5.551115e-17 8.326673e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "247 5.551115e-17 8.326673e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "247 1.136868e-13 \n", + "247 1.136868e-13 \n", + "247 1.136868e-13 \n", + "247 1.136868e-13 \n", + "247 1.136868e-13 \n", + "247 1.136868e-13 \n", + " Left_Gaze_X Right_Gaze_X Gaze_X\n", + "248 5.551115e-17 1.110223e-16 2.273737e-13\n", + "248 5.551115e-17 1.110223e-16 2.273737e-13\n", + "248 5.551115e-17 1.110223e-16 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "249 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "249 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "249 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "249 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "250 -5.551115e-17 5.551115e-17 2.775558e-17 
1.136868e-13\n", + "250 -5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "250 -5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "250 -5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "251 2.775558e-17 2.775558e-17 5.684342e-14\n", + "251 2.775558e-17 2.775558e-17 5.684342e-14\n", + "251 2.775558e-17 2.775558e-17 5.684342e-14\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "252 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "252 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "252 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "252 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "252 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "252 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "\n", + " Gaze_Y \n", + "252 1.136868e-13 \n", + "252 1.136868e-13 \n", + "252 1.136868e-13 \n", + "252 1.136868e-13 \n", + "252 1.136868e-13 \n", + "252 1.136868e-13 \n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "253 1.110223e-16 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "253 1.110223e-16 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "253 1.110223e-16 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "253 1.110223e-16 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "253 1.110223e-16 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "253 1.110223e-16 2.775558e-17 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "253 1.136868e-13 \n", + "253 1.136868e-13 \n", + "253 1.136868e-13 \n", + "253 1.136868e-13 \n", + "253 1.136868e-13 \n", + "253 1.136868e-13 \n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "254 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 
5.684342e-14\n", + "254 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "254 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "254 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "254 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "255 5.551115e-17 8.326673e-17 1.136868e-13\n", + "255 5.551115e-17 8.326673e-17 1.136868e-13\n", + "255 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Gaze_Y\n", + "256 8.326673e-17 1.136868e-13\n", + "256 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X\n", + "257 5.551115e-17 1.110223e-16\n", + "257 5.551115e-17 1.110223e-16\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "258 5.551115e-17 8.326673e-17 2.775558e-17 1.136868e-13\n", + "258 5.551115e-17 8.326673e-17 2.775558e-17 1.136868e-13\n", + "258 5.551115e-17 8.326673e-17 2.775558e-17 1.136868e-13\n", + "258 5.551115e-17 8.326673e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X\n", + "259 5.551115e-17\n", + " Left_Gaze_X Left_Gaze_Y\n", + "261 1.110223e-16 5.551115e-17\n", + "261 1.110223e-16 5.551115e-17\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "262 5.551115e-17 5.551115e-17 1.136868e-13\n", + "262 5.551115e-17 5.551115e-17 1.136868e-13\n", + "262 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "263 1.110223e-16 5.551115e-17 5.684342e-14\n", + "263 1.110223e-16 5.551115e-17 5.684342e-14\n", + "263 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "264 2.775558e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "264 2.775558e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "264 2.775558e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "264 2.775558e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "265 1.110223e-16 5.551115e-17 2.273737e-13 5.684342e-14\n", + "265 1.110223e-16 5.551115e-17 
2.273737e-13 5.684342e-14\n", + "265 1.110223e-16 5.551115e-17 2.273737e-13 5.684342e-14\n", + "265 1.110223e-16 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "266 5.551115e-17 2.775558e-17 5.684342e-14\n", + "266 5.551115e-17 2.775558e-17 5.684342e-14\n", + "266 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Right_Gaze_X Right_Gaze_Y\n", + "267 5.551115e-17 5.551115e-17\n", + "267 5.551115e-17 5.551115e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "268 1.110223e-16 -2.775558e-17 1.110223e-16 -2.775558e-17 4.547474e-13 \n", + "268 1.110223e-16 -2.775558e-17 1.110223e-16 -2.775558e-17 4.547474e-13 \n", + "268 1.110223e-16 -2.775558e-17 1.110223e-16 -2.775558e-17 4.547474e-13 \n", + "268 1.110223e-16 -2.775558e-17 1.110223e-16 -2.775558e-17 4.547474e-13 \n", + "268 1.110223e-16 -2.775558e-17 1.110223e-16 -2.775558e-17 4.547474e-13 \n", + "268 1.110223e-16 -2.775558e-17 1.110223e-16 -2.775558e-17 4.547474e-13 \n", + "\n", + " Gaze_Y \n", + "268 -5.684342e-14 \n", + "268 -5.684342e-14 \n", + "268 -5.684342e-14 \n", + "268 -5.684342e-14 \n", + "268 -5.684342e-14 \n", + "268 -5.684342e-14 \n", + " Right_Gaze_Y Gaze_Y\n", + "269 2.775558e-17 5.684342e-14\n", + "269 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "270 5.551115e-17 8.326673e-17 1.136868e-13\n", + "270 5.551115e-17 8.326673e-17 1.136868e-13\n", + "270 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X\n", + "271 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13\n", + "271 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13\n", + "271 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13\n", + "271 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "272 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "272 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "272 1.110223e-16 2.775558e-17 5.551115e-17 
1.136868e-13\n", + "272 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "273 5.551115e-17 8.326673e-17 5.684342e-14\n", + "273 5.551115e-17 8.326673e-17 5.684342e-14\n", + "273 5.551115e-17 8.326673e-17 5.684342e-14\n", + " Right_Gaze_X\n", + "274 1.110223e-16\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "275 1.110223e-16 5.551115e-17 8.326673e-17 2.273737e-13 1.136868e-13\n", + "275 1.110223e-16 5.551115e-17 8.326673e-17 2.273737e-13 1.136868e-13\n", + "275 1.110223e-16 5.551115e-17 8.326673e-17 2.273737e-13 1.136868e-13\n", + "275 1.110223e-16 5.551115e-17 8.326673e-17 2.273737e-13 1.136868e-13\n", + "275 1.110223e-16 5.551115e-17 8.326673e-17 2.273737e-13 1.136868e-13\n", + " Left_Gaze_Y Gaze_Y\n", + "276 5.551115e-17 5.684342e-14\n", + "276 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "277 1.110223e-16 8.326673e-17 5.551115e-17 1.136868e-13\n", + "277 1.110223e-16 8.326673e-17 5.551115e-17 1.136868e-13\n", + "277 1.110223e-16 8.326673e-17 5.551115e-17 1.136868e-13\n", + "277 1.110223e-16 8.326673e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "278 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + "278 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + "278 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + "278 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X\n", + "279 1.110223e-16\n", + " Left_Gaze_Y Gaze_Y\n", + "280 8.326673e-17 1.136868e-13\n", + "280 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "281 2.775558e-17 5.551115e-17 1.136868e-13\n", + "281 2.775558e-17 5.551115e-17 1.136868e-13\n", + "281 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "282 5.551115e-17 1.110223e-16 8.326673e-17 1.705303e-13\n", + "282 5.551115e-17 1.110223e-16 8.326673e-17 1.705303e-13\n", + "282 5.551115e-17 
1.110223e-16 8.326673e-17 1.705303e-13\n", + "282 5.551115e-17 1.110223e-16 8.326673e-17 1.705303e-13\n", + " Left_Gaze_Y\n", + "283 2.775558e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "284 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "284 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "284 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + "284 1.110223e-16 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Gaze_Y\n", + "285 2.775558e-17 5.684342e-14\n", + "285 2.775558e-17 5.684342e-14\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "286 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "286 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "286 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "286 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "286 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "287 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "287 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "287 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "287 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "288 8.326673e-17 5.551115e-17 1.136868e-13\n", + "288 8.326673e-17 5.551115e-17 1.136868e-13\n", + "288 8.326673e-17 5.551115e-17 1.136868e-13\n", + " Right_Gaze_X\n", + "289 1.110223e-16\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y\n", + "290 2.775558e-17 -5.551115e-17 -2.775558e-17\n", + "290 2.775558e-17 -5.551115e-17 -2.775558e-17\n", + "290 2.775558e-17 -5.551115e-17 -2.775558e-17\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "291 2.775558e-17 5.551115e-17 5.551115e-17 5.684342e-14\n", + "291 2.775558e-17 5.551115e-17 5.551115e-17 5.684342e-14\n", + "291 
2.775558e-17 5.551115e-17 5.551115e-17 5.684342e-14\n", + "291 2.775558e-17 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "292 5.551115e-17 2.775558e-17 5.684342e-14\n", + "292 5.551115e-17 2.775558e-17 5.684342e-14\n", + "292 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y\n", + "293 2.775558e-17\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "294 5.551115e-17 5.551115e-17 1.136868e-13\n", + "294 5.551115e-17 5.551115e-17 1.136868e-13\n", + "294 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "295 5.551115e-17 1.110223e-16 1.136868e-13\n", + "295 5.551115e-17 1.110223e-16 1.136868e-13\n", + "295 5.551115e-17 1.110223e-16 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "296 5.551115e-17 2.775558e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "296 5.551115e-17 2.775558e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "296 5.551115e-17 2.775558e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "296 5.551115e-17 2.775558e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "296 5.551115e-17 2.775558e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "297 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "297 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "297 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "297 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "298 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + "298 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + "298 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + "298 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Right_Gaze_Y\n", + "299 2.775558e-17\n", + " Left_Gaze_Y Gaze_Y\n", + "300 1.110223e-16 1.136868e-13\n", + "300 1.110223e-16 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "301 -5.551115e-17 
5.551115e-17 1.110223e-16 1.136868e-13\n", + "301 -5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "301 -5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "301 -5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "302 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "302 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "302 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "302 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "303 8.326673e-17 1.136868e-13\n", + "303 8.326673e-17 1.136868e-13\n", + " Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "304 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "304 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "304 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "304 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "306 8.326673e-17 8.326673e-17 1.136868e-13\n", + "306 8.326673e-17 8.326673e-17 1.136868e-13\n", + "306 8.326673e-17 8.326673e-17 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "307 5.551115e-17 1.136868e-13\n", + "307 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y\n", + "308 1.110223e-16 2.775558e-17\n", + "308 1.110223e-16 2.775558e-17\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "309 8.326673e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "309 8.326673e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "309 8.326673e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "309 8.326673e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "309 8.326673e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "310 2.775558e-17 5.551115e-17 1.136868e-13\n", + "310 2.775558e-17 5.551115e-17 1.136868e-13\n", + "310 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X\n", 
+ "311 1.110223e-16\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "312 8.326673e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "312 8.326673e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "312 8.326673e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "312 8.326673e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_X Gaze_X\n", + "313 1.110223e-16 5.551115e-17 2.273737e-13\n", + "313 1.110223e-16 5.551115e-17 2.273737e-13\n", + "313 1.110223e-16 5.551115e-17 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "314 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "314 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "314 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "314 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + "314 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 5.684342e-14\n", + " Right_Gaze_Y\n", + "315 2.775558e-17\n", + " Left_Gaze_Y Gaze_Y\n", + "316 2.775558e-17 5.684342e-14\n", + "316 2.775558e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "317 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "317 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "317 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "317 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "318 5.551115e-17 5.551115e-17 1.136868e-13\n", + "318 5.551115e-17 5.551115e-17 1.136868e-13\n", + "318 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "319 8.326673e-17 5.551115e-17 1.136868e-13\n", + "319 8.326673e-17 5.551115e-17 1.136868e-13\n", + "319 8.326673e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "320 5.551115e-17 2.775558e-17 1.136868e-13\n", + "320 5.551115e-17 2.775558e-17 1.136868e-13\n", + "320 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X 
Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "321 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "321 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "321 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "321 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "321 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "321 1.110223e-16 5.551115e-17 1.110223e-16 2.775558e-17 4.547474e-13 \n", + "\n", + " Gaze_Y \n", + "321 1.136868e-13 \n", + "321 1.136868e-13 \n", + "321 1.136868e-13 \n", + "321 1.136868e-13 \n", + "321 1.136868e-13 \n", + "321 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "322 5.551115e-17 2.775558e-17 5.684342e-14\n", + "322 5.551115e-17 2.775558e-17 5.684342e-14\n", + "322 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "323 2.775558e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + "323 2.775558e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + "323 2.775558e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + "323 2.775558e-17 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "324 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "324 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "324 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "324 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "325 1.110223e-16 8.326673e-17 5.684342e-14\n", + "325 1.110223e-16 8.326673e-17 5.684342e-14\n", + "325 1.110223e-16 8.326673e-17 5.684342e-14\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "326 -5.551115e-17 1.110223e-16 5.551115e-17 2.273737e-13 1.136868e-13\n", + "326 -5.551115e-17 1.110223e-16 5.551115e-17 2.273737e-13 1.136868e-13\n", + "326 -5.551115e-17 1.110223e-16 5.551115e-17 2.273737e-13 1.136868e-13\n", + "326 -5.551115e-17 
1.110223e-16 5.551115e-17 2.273737e-13 1.136868e-13\n", + "326 -5.551115e-17 1.110223e-16 5.551115e-17 2.273737e-13 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "327 8.326673e-17 1.136868e-13\n", + "327 8.326673e-17 1.136868e-13\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "328 1.110223e-16 2.775558e-17 5.551115e-17 5.684342e-14\n", + "328 1.110223e-16 2.775558e-17 5.551115e-17 5.684342e-14\n", + "328 1.110223e-16 2.775558e-17 5.551115e-17 5.684342e-14\n", + "328 1.110223e-16 2.775558e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "329 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "329 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "329 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + "329 5.551115e-17 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "331 8.326673e-17 2.775558e-17 1.136868e-13\n", + "331 8.326673e-17 2.775558e-17 1.136868e-13\n", + "331 8.326673e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y\n", + "332 2.775558e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "333 1.110223e-16 5.551115e-17 2.775558e-17 5.684342e-14\n", + "333 1.110223e-16 5.551115e-17 2.775558e-17 5.684342e-14\n", + "333 1.110223e-16 5.551115e-17 2.775558e-17 5.684342e-14\n", + "333 1.110223e-16 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "334 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "334 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "334 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "334 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "334 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "334 1.110223e-16 2.775558e-17 1.110223e-16 5.551115e-17 4.547474e-13 \n", + "\n", + " 
Gaze_Y \n", + "334 1.136868e-13 \n", + "334 1.136868e-13 \n", + "334 1.136868e-13 \n", + "334 1.136868e-13 \n", + "334 1.136868e-13 \n", + "334 1.136868e-13 \n", + " Right_Gaze_Y\n", + "335 2.775558e-17\n", + " Left_Gaze_Y Gaze_Y\n", + "336 8.326673e-17 5.684342e-14\n", + "336 8.326673e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "337 5.551115e-17 5.551115e-17 8.326673e-17 1.705303e-13\n", + "337 5.551115e-17 5.551115e-17 8.326673e-17 1.705303e-13\n", + "337 5.551115e-17 5.551115e-17 8.326673e-17 1.705303e-13\n", + "337 5.551115e-17 5.551115e-17 8.326673e-17 1.705303e-13\n", + " Right_Gaze_X Right_Gaze_Y Gaze_X\n", + "338 5.551115e-17 5.551115e-17 2.273737e-13\n", + "338 5.551115e-17 5.551115e-17 2.273737e-13\n", + "338 5.551115e-17 5.551115e-17 2.273737e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "339 1.110223e-16 2.775558e-17 8.326673e-17 1.136868e-13\n", + "339 1.110223e-16 2.775558e-17 8.326673e-17 1.136868e-13\n", + "339 1.110223e-16 2.775558e-17 8.326673e-17 1.136868e-13\n", + "339 1.110223e-16 2.775558e-17 8.326673e-17 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "340 5.551115e-17 5.684342e-14\n", + "340 5.551115e-17 5.684342e-14\n", + " Right_Gaze_X\n", + "341 5.551115e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "342 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "342 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "342 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "342 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "342 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "342 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "342 1.136868e-13 \n", + "342 1.136868e-13 \n", + "342 1.136868e-13 \n", + "342 1.136868e-13 \n", + "342 1.136868e-13 \n", + "342 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", 
+ "343 5.551115e-17 2.775558e-17 5.684342e-14\n", + "343 5.551115e-17 2.775558e-17 5.684342e-14\n", + "343 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "344 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + "344 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + "344 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + "344 5.551115e-17 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "345 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "345 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "345 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "345 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "345 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "346 1.110223e-16 2.775558e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "346 1.110223e-16 2.775558e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "346 1.110223e-16 2.775558e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "346 1.110223e-16 2.775558e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + "346 1.110223e-16 2.775558e-17 5.551115e-17 2.273737e-13 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "347 5.551115e-17 1.136868e-13\n", + "347 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "348 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "348 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "348 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "348 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "349 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "349 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "349 5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "349 
5.551115e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "350 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "350 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "350 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "350 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "351 1.110223e-16 5.551115e-17 5.551115e-17 8.326673e-17 2.273737e-13 \n", + "351 1.110223e-16 5.551115e-17 5.551115e-17 8.326673e-17 2.273737e-13 \n", + "351 1.110223e-16 5.551115e-17 5.551115e-17 8.326673e-17 2.273737e-13 \n", + "351 1.110223e-16 5.551115e-17 5.551115e-17 8.326673e-17 2.273737e-13 \n", + "351 1.110223e-16 5.551115e-17 5.551115e-17 8.326673e-17 2.273737e-13 \n", + "351 1.110223e-16 5.551115e-17 5.551115e-17 8.326673e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "351 1.136868e-13 \n", + "351 1.136868e-13 \n", + "351 1.136868e-13 \n", + "351 1.136868e-13 \n", + "351 1.136868e-13 \n", + "351 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "352 5.551115e-17 8.326673e-17 1.136868e-13\n", + "352 5.551115e-17 8.326673e-17 1.136868e-13\n", + "352 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "353 5.551115e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "353 5.551115e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "353 5.551115e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "353 5.551115e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "354 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "354 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "354 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + "354 5.551115e-17 5.551115e-17 1.110223e-16 1.136868e-13\n", + " Left_Gaze_Y\n", + "355 2.775558e-17\n", + " Right_Gaze_Y\n", + "356 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "357 
5.551115e-17 8.326673e-17 1.136868e-13\n", + "357 5.551115e-17 8.326673e-17 1.136868e-13\n", + "357 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Right_Gaze_Y\n", + "358 2.775558e-17\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "360 8.326673e-17 5.551115e-17 1.136868e-13\n", + "360 8.326673e-17 5.551115e-17 1.136868e-13\n", + "360 8.326673e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "361 5.551115e-17 8.326673e-17 1.136868e-13\n", + "361 5.551115e-17 8.326673e-17 1.136868e-13\n", + "361 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "362 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "362 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "362 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "362 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n", + "362 1.110223e-16 2.775558e-17 2.775558e-17 2.273737e-13 5.684342e-14\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "363 8.326673e-17 -2.775558e-17 1.136868e-13\n", + "363 8.326673e-17 -2.775558e-17 1.136868e-13\n", + "363 8.326673e-17 -2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Gaze_Y\n", + "364 -2.775558e-17 -5.684342e-14\n", + "364 -2.775558e-17 -5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Gaze_Y\n", + "365 5.551115e-17 5.551115e-17 5.684342e-14\n", + "365 5.551115e-17 5.551115e-17 5.684342e-14\n", + "365 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "366 1.110223e-16 2.775558e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "366 1.110223e-16 2.775558e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "366 1.110223e-16 2.775558e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "366 1.110223e-16 2.775558e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "366 1.110223e-16 2.775558e-17 5.551115e-17 
2.775558e-17 2.273737e-13 \n", + "366 1.110223e-16 2.775558e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "366 5.684342e-14 \n", + "366 5.684342e-14 \n", + "366 5.684342e-14 \n", + "366 5.684342e-14 \n", + "366 5.684342e-14 \n", + "366 5.684342e-14 \n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "367 5.551115e-17 2.775558e-17 1.136868e-13\n", + "367 5.551115e-17 2.775558e-17 1.136868e-13\n", + "367 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_X\n", + "368 -5.551115e-17 2.775558e-17 -2.273737e-13\n", + "368 -5.551115e-17 2.775558e-17 -2.273737e-13\n", + "368 -5.551115e-17 2.775558e-17 -2.273737e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "369 5.551115e-17 5.684342e-14\n", + "369 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "370 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + "370 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + "370 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + "370 1.110223e-16 1.110223e-16 5.551115e-17 1.136868e-13\n", + " Right_Gaze_X\n", + "371 1.110223e-16\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "373 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "373 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "373 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "373 5.551115e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "374 1.110223e-16 -2.775558e-17 5.551115e-17 5.684342e-14\n", + "374 1.110223e-16 -2.775558e-17 5.551115e-17 5.684342e-14\n", + "374 1.110223e-16 -2.775558e-17 5.551115e-17 5.684342e-14\n", + "374 1.110223e-16 -2.775558e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "375 8.326673e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "375 8.326673e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "375 8.326673e-17 1.110223e-16 5.551115e-17 1.136868e-13\n", + "375 8.326673e-17 
1.110223e-16 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "376 1.110223e-16 2.775558e-17 5.684342e-14\n", + "376 1.110223e-16 2.775558e-17 5.684342e-14\n", + "376 1.110223e-16 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "377 5.551115e-17 8.326673e-17 1.136868e-13\n", + "377 5.551115e-17 8.326673e-17 1.136868e-13\n", + "377 5.551115e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "378 8.326673e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "378 8.326673e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "378 8.326673e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "378 8.326673e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "379 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "379 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "379 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "379 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "379 5.551115e-17 5.551115e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "380 1.110223e-16 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "380 1.110223e-16 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "380 1.110223e-16 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "380 1.110223e-16 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "380 1.110223e-16 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "380 1.110223e-16 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "380 1.136868e-13 \n", + "380 1.136868e-13 \n", + "380 1.136868e-13 \n", + "380 1.136868e-13 \n", + "380 1.136868e-13 \n", + "380 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_X Gaze_Y\n", + "381 5.551115e-17 1.110223e-16 5.684342e-14\n", + "381 5.551115e-17 1.110223e-16 
5.684342e-14\n", + "381 5.551115e-17 1.110223e-16 5.684342e-14\n", + " Right_Gaze_Y Gaze_Y\n", + "382 5.551115e-17 5.684342e-14\n", + "382 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y\n", + "383 2.775558e-17 2.775558e-17\n", + "383 2.775558e-17 2.775558e-17\n", + " Left_Gaze_Y Gaze_Y\n", + "384 5.551115e-17 5.684342e-14\n", + "384 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "385 2.775558e-17 2.775558e-17 1.136868e-13\n", + "385 2.775558e-17 2.775558e-17 1.136868e-13\n", + "385 2.775558e-17 2.775558e-17 1.136868e-13\n", + " Right_Gaze_Y Gaze_Y\n", + "386 5.551115e-17 5.684342e-14\n", + "386 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "387 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "387 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "387 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + "387 2.775558e-17 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Right_Gaze_Y\n", + "388 2.775558e-17\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "389 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "389 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "389 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "389 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "389 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "390 1.110223e-16 5.551115e-17 5.684342e-14\n", + "390 1.110223e-16 5.551115e-17 5.684342e-14\n", + "390 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Gaze_Y\n", + "392 8.326673e-17 1.136868e-13\n", + "392 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "393 2.775558e-17 5.551115e-17 1.136868e-13\n", + "393 2.775558e-17 5.551115e-17 1.136868e-13\n", + "393 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "394 5.551115e-17 
2.775558e-17 1.136868e-13\n", + "394 5.551115e-17 2.775558e-17 1.136868e-13\n", + "394 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "395 5.551115e-17 2.775558e-17 5.684342e-14\n", + "395 5.551115e-17 2.775558e-17 5.684342e-14\n", + "395 5.551115e-17 2.775558e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "396 1.110223e-16 2.775558e-17 5.684342e-14\n", + "396 1.110223e-16 2.775558e-17 5.684342e-14\n", + "396 1.110223e-16 2.775558e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "397 5.551115e-17 5.551115e-17 1.136868e-13\n", + "397 5.551115e-17 5.551115e-17 1.136868e-13\n", + "397 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Gaze_Y\n", + "398 5.551115e-17 5.551115e-17 5.684342e-14\n", + "398 5.551115e-17 5.551115e-17 5.684342e-14\n", + "398 5.551115e-17 5.551115e-17 5.684342e-14\n", + " Right_Gaze_Y\n", + "399 2.775558e-17\n", + " Left_Gaze_X Right_Gaze_Y Gaze_Y\n", + "400 1.110223e-16 2.775558e-17 5.684342e-14\n", + "400 1.110223e-16 2.775558e-17 5.684342e-14\n", + "400 1.110223e-16 2.775558e-17 5.684342e-14\n", + " Right_Gaze_Y Gaze_Y\n", + "401 -2.775558e-17 -5.684342e-14\n", + "401 -2.775558e-17 -5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "402 5.551115e-17 2.775558e-17 1.136868e-13\n", + "402 5.551115e-17 2.775558e-17 1.136868e-13\n", + "402 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "403 5.551115e-17 5.551115e-17 1.136868e-13\n", + "403 5.551115e-17 5.551115e-17 1.136868e-13\n", + "403 5.551115e-17 5.551115e-17 1.136868e-13\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Right_Gaze_Y Gaze_Y\n", + "404 5.551115e-17 5.684342e-14\n", + "404 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "405 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "405 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 
\n", + "405 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "405 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "405 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "405 5.551115e-17 5.551115e-17 5.551115e-17 2.775558e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "405 5.684342e-14 \n", + "405 5.684342e-14 \n", + "405 5.684342e-14 \n", + "405 5.684342e-14 \n", + "405 5.684342e-14 \n", + "405 5.684342e-14 \n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "406 -5.551115e-17 8.326673e-17 8.326673e-17 1.136868e-13\n", + "406 -5.551115e-17 8.326673e-17 8.326673e-17 1.136868e-13\n", + "406 -5.551115e-17 8.326673e-17 8.326673e-17 1.136868e-13\n", + "406 -5.551115e-17 8.326673e-17 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "407 5.551115e-17 1.110223e-16 1.136868e-13\n", + "407 5.551115e-17 1.110223e-16 1.136868e-13\n", + "407 5.551115e-17 1.110223e-16 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Gaze_Y\n", + "408 1.110223e-16 5.551115e-17 5.684342e-14\n", + "408 1.110223e-16 5.551115e-17 5.684342e-14\n", + "408 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "409 5.551115e-17 -5.551115e-17 -2.273737e-13 1.136868e-13\n", + "409 5.551115e-17 -5.551115e-17 -2.273737e-13 1.136868e-13\n", + "409 5.551115e-17 -5.551115e-17 -2.273737e-13 1.136868e-13\n", + "409 5.551115e-17 -5.551115e-17 -2.273737e-13 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "410 -5.551115e-17 8.326673e-17 5.551115e-17 -2.273737e-13 1.705303e-13\n", + "410 -5.551115e-17 8.326673e-17 5.551115e-17 -2.273737e-13 1.705303e-13\n", + "410 -5.551115e-17 8.326673e-17 5.551115e-17 -2.273737e-13 1.705303e-13\n", + "410 -5.551115e-17 8.326673e-17 5.551115e-17 -2.273737e-13 1.705303e-13\n", + "410 -5.551115e-17 8.326673e-17 5.551115e-17 -2.273737e-13 1.705303e-13\n", + " Right_Gaze_Y\n", + "412 2.775558e-17\n", + " Left_Gaze_X 
Left_Gaze_Y\n", + "413 1.110223e-16 5.551115e-17\n", + "413 1.110223e-16 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "415 -2.775558e-17 5.551115e-17 5.684342e-14\n", + "415 -2.775558e-17 5.551115e-17 5.684342e-14\n", + "415 -2.775558e-17 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X\n", + "416 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13\n", + "416 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13\n", + "416 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13\n", + "416 5.551115e-17 1.110223e-16 2.775558e-17 2.273737e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "417 2.775558e-17 1.110223e-16 5.551115e-17 5.684342e-14\n", + "417 2.775558e-17 1.110223e-16 5.551115e-17 5.684342e-14\n", + "417 2.775558e-17 1.110223e-16 5.551115e-17 5.684342e-14\n", + "417 2.775558e-17 1.110223e-16 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "418 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "418 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "418 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "418 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "418 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "418 5.551115e-17 2.775558e-17 1.110223e-16 2.775558e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "418 1.136868e-13 \n", + "418 1.136868e-13 \n", + "418 1.136868e-13 \n", + "418 1.136868e-13 \n", + "418 1.136868e-13 \n", + "418 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "419 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "419 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "419 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + "419 5.551115e-17 1.110223e-16 8.326673e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X\n", + "420 2.775558e-17 1.110223e-16\n", + "420 2.775558e-17 
1.110223e-16\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "421 2.775558e-17 5.551115e-17 1.136868e-13\n", + "421 2.775558e-17 5.551115e-17 1.136868e-13\n", + "421 2.775558e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "422 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "422 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "422 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + "422 1.110223e-16 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "423 5.551115e-17 2.775558e-17 1.136868e-13\n", + "423 5.551115e-17 2.775558e-17 1.136868e-13\n", + "423 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "424 5.551115e-17 2.775558e-17 1.136868e-13\n", + "424 5.551115e-17 2.775558e-17 1.136868e-13\n", + "424 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X\n", + "425 1.110223e-16\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "426 5.551115e-17 5.551115e-17 1.136868e-13\n", + "426 5.551115e-17 5.551115e-17 1.136868e-13\n", + "426 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "428 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + "428 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + "428 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + "428 5.551115e-17 5.551115e-17 1.110223e-16 1.705303e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "429 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "429 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "429 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "429 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "429 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "429 5.551115e-17 2.775558e-17 1.110223e-16 5.551115e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "429 
1.136868e-13 \n", + "429 1.136868e-13 \n", + "429 1.136868e-13 \n", + "429 1.136868e-13 \n", + "429 1.136868e-13 \n", + "429 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "430 5.551115e-17 2.775558e-17 1.136868e-13\n", + "430 5.551115e-17 2.775558e-17 1.136868e-13\n", + "430 5.551115e-17 2.775558e-17 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "431 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "431 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "431 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + "431 8.326673e-17 1.110223e-16 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X \\\n", + "432 5.551115e-17 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "432 5.551115e-17 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "432 5.551115e-17 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "432 5.551115e-17 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "432 5.551115e-17 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "432 5.551115e-17 5.551115e-17 1.110223e-16 8.326673e-17 2.273737e-13 \n", + "\n", + " Gaze_Y \n", + "432 1.136868e-13 \n", + "432 1.136868e-13 \n", + "432 1.136868e-13 \n", + "432 1.136868e-13 \n", + "432 1.136868e-13 \n", + "432 1.136868e-13 \n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "433 5.551115e-17 5.551115e-17 1.136868e-13\n", + "433 5.551115e-17 5.551115e-17 1.136868e-13\n", + "433 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X\n", + "434 5.551115e-17\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_Y\n", + "435 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "435 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "435 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + "435 2.775558e-17 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_X\n", + "436 -5.551115e-17 5.551115e-17\n", + "436 -5.551115e-17 5.551115e-17\n", + " 
Right_Gaze_Y Gaze_Y\n", + "437 5.551115e-17 5.684342e-14\n", + "437 5.551115e-17 5.684342e-14\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_Y Gaze_X Gaze_Y\n", + "438 1.110223e-16 2.775558e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "438 1.110223e-16 2.775558e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "438 1.110223e-16 2.775558e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "438 1.110223e-16 2.775558e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + "438 1.110223e-16 2.775558e-17 1.110223e-16 2.273737e-13 1.136868e-13\n", + " Left_Gaze_Y Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "439 5.551115e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "439 5.551115e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "439 5.551115e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "439 5.551115e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n", + "439 5.551115e-17 -5.551115e-17 8.326673e-17 -2.273737e-13 1.136868e-13\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "440 1.110223e-16 2.775558e-17 1.136868e-13\n", + "440 1.110223e-16 2.775558e-17 1.136868e-13\n", + "440 1.110223e-16 2.775558e-17 1.136868e-13\n", + " Left_Gaze_X Left_Gaze_Y Right_Gaze_X Gaze_X Gaze_Y\n", + "441 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "441 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "441 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "441 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "441 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_X Right_Gaze_X Right_Gaze_Y Gaze_X Gaze_Y\n", + "442 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "442 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "442 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "442 1.110223e-16 
5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + "442 1.110223e-16 5.551115e-17 5.551115e-17 2.273737e-13 5.684342e-14\n", + " Left_Gaze_Y Gaze_Y\n", + "443 5.551115e-17 5.684342e-14\n", + "443 5.551115e-17 5.684342e-14\n", + " Left_Gaze_Y Right_Gaze_Y Gaze_Y\n", + "444 5.551115e-17 5.551115e-17 1.136868e-13\n", + "444 5.551115e-17 5.551115e-17 1.136868e-13\n", + "444 5.551115e-17 5.551115e-17 1.136868e-13\n", + " Left_Gaze_X Right_Gaze_Y Gaze_Y\n", + "445 5.551115e-17 1.110223e-16 1.136868e-13\n", + "445 5.551115e-17 1.110223e-16 1.136868e-13\n", + "445 5.551115e-17 1.110223e-16 1.136868e-13\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/tmp/ipykernel_91706/2711523652.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0midxx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munique\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtolist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m 
\u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpre_result\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0miloc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwhere\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m==\u001b[0m\u001b[0midxx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwhere\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m==\u001b[0m\u001b[0midxx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mresult\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0miloc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwhere\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m==\u001b[0m\u001b[0midxx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwhere\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m==\u001b[0m\u001b[0midxx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b
[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0;31m#pdb.set_trace()\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;31m#pd.concat([result,pre_result]).drop_duplicates(keep=False)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "for idxx in np.unique(idx[0]).tolist():\n", + " print(pre_result.iloc[idx[0][np.where(idx[0]==idxx)[0]],idx[1][np.where(idx[0]==idxx)[0]]]-result.iloc[idx[0][np.where(idx[0]==idxx)[0]],idx[1][np.where(idx[0]==idxx)[0]]])\n", + " #pdb.set_trace()\n", + "#pd.concat([result,pre_result]).drop_duplicates(keep=False)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/keyboard_and_mouse/dataset/.ipynb_checkpoints/01-PreProcessing-checkpoint.ipynb b/keyboard_and_mouse/dataset/.ipynb_checkpoints/01-PreProcessing-checkpoint.ipynb new file mode 100644 index 0000000..9723910 --- /dev/null +++ b/keyboard_and_mouse/dataset/.ipynb_checkpoints/01-PreProcessing-checkpoint.ipynb @@ -0,0 +1,4008 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "da341276", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import pdb, os\n", + "from tqdm import tqdm" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "38b48ab2", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + 
"data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
level_0TimestamptypePIDkeydatarangeindexLeft_Gaze_XLeft_Gaze_Y...clientYconditionparttaskIDbuttonisCurrentlyPastinglorem_formatlorem_textruleslabel
event
undefined2222000000...0222000022
Align centerAlign498498498498000000...04984984980000498498
Align leftAlign52525252000000...052525200005252
Align rightAlign436436436436000000...04364364360000436436
Bold (CTRL+B)Bold338338338338000000...03383383380000338338
Font FamilyFont3058305830583058000000...0305830583058000030583058
Font SizeFont1558155815581558000000...0155815581558000015581558
Italic (CTRL+I)Italic349349349349000000...03493493490000349349
Underline (CTRL+U)Underline396396396396000000...03963963960000396396
\n", + "

9 rows × 27 columns

\n", + "
" + ], + "text/plain": [ + " level_0 Timestamp type PID key data range \\\n", + "event \n", + " undefined 2 2 2 2 0 0 0 \n", + "Align centerAlign 498 498 498 498 0 0 0 \n", + "Align leftAlign 52 52 52 52 0 0 0 \n", + "Align rightAlign 436 436 436 436 0 0 0 \n", + "Bold (CTRL+B)Bold 338 338 338 338 0 0 0 \n", + "Font FamilyFont 3058 3058 3058 3058 0 0 0 \n", + "Font SizeFont 1558 1558 1558 1558 0 0 0 \n", + "Italic (CTRL+I)Italic 349 349 349 349 0 0 0 \n", + "Underline (CTRL+U)Underline 396 396 396 396 0 0 0 \n", + "\n", + " index Left_Gaze_X Left_Gaze_Y ... clientY \\\n", + "event ... \n", + " undefined 0 0 0 ... 0 \n", + "Align centerAlign 0 0 0 ... 0 \n", + "Align leftAlign 0 0 0 ... 0 \n", + "Align rightAlign 0 0 0 ... 0 \n", + "Bold (CTRL+B)Bold 0 0 0 ... 0 \n", + "Font FamilyFont 0 0 0 ... 0 \n", + "Font SizeFont 0 0 0 ... 0 \n", + "Italic (CTRL+I)Italic 0 0 0 ... 0 \n", + "Underline (CTRL+U)Underline 0 0 0 ... 0 \n", + "\n", + " condition part taskID button \\\n", + "event \n", + " undefined 2 2 2 0 \n", + "Align centerAlign 498 498 498 0 \n", + "Align leftAlign 52 52 52 0 \n", + "Align rightAlign 436 436 436 0 \n", + "Bold (CTRL+B)Bold 338 338 338 0 \n", + "Font FamilyFont 3058 3058 3058 0 \n", + "Font SizeFont 1558 1558 1558 0 \n", + "Italic (CTRL+I)Italic 349 349 349 0 \n", + "Underline (CTRL+U)Underline 396 396 396 0 \n", + "\n", + " isCurrentlyPasting lorem_format lorem_text \\\n", + "event \n", + " undefined 0 0 0 \n", + "Align centerAlign 0 0 0 \n", + "Align leftAlign 0 0 0 \n", + "Align rightAlign 0 0 0 \n", + "Bold (CTRL+B)Bold 0 0 0 \n", + "Font FamilyFont 0 0 0 \n", + "Font SizeFont 0 0 0 \n", + "Italic (CTRL+I)Italic 0 0 0 \n", + "Underline (CTRL+U)Underline 0 0 0 \n", + "\n", + " rules label \n", + "event \n", + " undefined 2 2 \n", + "Align centerAlign 498 498 \n", + "Align leftAlign 52 52 \n", + "Align rightAlign 436 436 \n", + "Bold (CTRL+B)Bold 338 338 \n", + "Font FamilyFont 3058 3058 \n", + "Font SizeFont 1558 1558 \n", + "Italic 
(CTRL+I)Italic 349 349 \n", + "Underline (CTRL+U)Underline 396 396 \n", + "\n", + "[9 rows x 27 columns]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#study_data_path = '../IntentData/'\n", + "#data = pd.read_pickle(study_data_path + \"/Preprocessing_data/filtered_data.pkl\")\n", + "data = pd.read_pickle(\"filtered_data.pkl\")\n", + "\n", + "# Nine events (1 underdefined included)\n", + "data[(data.type == \"toolbar\")].groupby([\"event\"]).count()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "41e16153", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array(['Tab', 'Ctrl+I', 'Ctrl+Z', 'Esc', 'Italic (CTRL+I)Italic',\n", + " 'Font FamilyFont', 'Ctrl+B', 'Align centerAlign', 'Font SizeFont',\n", + " 'Align rightAlign', 'Underline (CTRL+U)Underline', nan, 'Ctrl+C',\n", + " 'Shift+Tab', 'Bold (CTRL+B)Bold', ' undefined', 'Ctrl+A',\n", + " 'Align leftAlign', 'Ctrl+N', 'Ctrl+V', 'Ctrl+H', 'Ctrl+Y',\n", + " 'Ctrl+S', 'Ctrl+Tab', 'Ctrl+7'], dtype=object)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Remove samples where rules=None\n", + "data = data[(data.rules != 'None')]\n", + "\n", + "action_array = data[(data.type == \"commands\") | (data.type == \"toolbar\")].event.unique()\n", + "action_array" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "46d239fc", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'Tab': 8,\n", + " 'Ctrl+I': 1,\n", + " 'Ctrl+Z': 9,\n", + " 'Esc': 10,\n", + " 'Italic (CTRL+I)Italic': 1,\n", + " 'Font FamilyFont': 4,\n", + " 'Ctrl+B': 0,\n", + " 'Align centerAlign': 5,\n", + " 'Font SizeFont': 3,\n", + " 'Align rightAlign': 6,\n", + " 'Underline (CTRL+U)Underline': 2,\n", + " nan: 12,\n", + " 'Ctrl+C': 11,\n", + " 'Shift+Tab': 8,\n", + " 'Bold (CTRL+B)Bold': 0,\n", + " ' undefined': 12}" + ] + }, + "execution_count": 4, + 
"metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "toolbar_list = ['Bold (CTRL+B)Bold',\n", + " 'Italic (CTRL+I)Italic',\n", + " 'Underline (CTRL+U)Underline',\n", + " 'Font SizeFont',\n", + " 'Font FamilyFont',\n", + " 'Align leftAlign',\n", + " 'Align centerAlign',\n", + " 'Align rightAlign',\n", + " 'undefined']\n", + "''''command_list = ['Ctrl+B',\n", + " 'Ctrl+I',\n", + " 'Tab',\n", + " 'Shift+Tab',\n", + " 'Ctrl+C',\n", + " 'Ctrl+V',\n", + " 'Ctrl+Z']'''\n", + "allowed_command_list = ['Ctrl+B',\n", + " 'Ctrl+I',\n", + " 'Tab',\n", + " 'Shift+Tab',\n", + " 'Ctrl+U'] # As written in the paper, only allowed these shortcuts\n", + "action_list = ['Bold', \n", + " 'Italic', \n", + " 'Underline',\n", + " 'FontSize',\n", + " 'FontFamily',\n", + " 'AlignmentLeft',\n", + " 'AlignmentCenter',\n", + " 'AlignmentRight',\n", + " 'Indent']\n", + "# Map the keyboard shortcuts and operations on the toolbar to actions\n", + "# 'Bold' 0\n", + "# 'Italic' 1\n", + "# 'Underline' 2\n", + "# 'Font Size' 3\n", + "# 'Font Family' 4\n", + "# 'Alignment center' 5\n", + "# 'Alignment right' 6\n", + "# 'Alignment left' 7\n", + "# 'Indent' 8\n", + "# 'Revert' 9 \n", + "# 'Next Task' 10\n", + "# 'Copy' 11 \n", + "# 'nan/undefined' 12\n", + "action_numbers = [8, 1, 9, 10, 1, 4, 0, 5, 3, 6, 2, 12, 11, 8, 0, 12]\n", + "action_dict = dict(zip(action_array, action_numbers))\n", + "action_dict" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "14829521", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array(['commands', 'key', 'selection', 'toolbar', 'eye', 'pos', 'click',\n", + " 'start', 'end', 'lorem'], dtype=object)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data.type.unique()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "a77cebd0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "keys: 
34407 \n", + "eyes: 8106054 \n", + "mouse: 1947827 \n", + "cmd: 3459 \n", + "toolbar: 6859\n" + ] + } + ], + "source": [ + "# Take out dataframes\n", + "df_keys = data[data.type == \"key\"]\n", + "df_eyes = data[data.type == \"eye\"]\n", + "df_mouse_pos = data[data.type == \"pos\"]\n", + "df_mouse_click = data[data.type == \"click\"]\n", + "df_mouses = pd.concat([df_mouse_pos, df_mouse_click])\n", + "df_cmds = data[data.type == \"commands\"]\n", + "df_toolbars = data[data.type == \"toolbar\"]\n", + "print(\"keys:\", len(df_keys), \"\\neyes:\", len(df_eyes), \"\\nmouse:\", len(df_mouses), \"\\ncmd:\",len(df_cmds), \"\\ntoolbar:\",len(df_toolbars))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "6418e565", + "metadata": {}, + "outputs": [], + "source": [ + "# Only take needed columns\n", + "df_key = pd.DataFrame({\"Timestamp\": df_keys.Timestamp, \n", + " \"Event\": df_keys.event, \n", + " \"Key\": df_keys.key,\n", + " \"TaskID\": df_keys.taskID,\n", + " \"Part\": df_keys.part, \n", + " \"PID\": df_keys.PID,\n", + " \"TextRule\": df_keys.rules,\n", + " \"Rule\": df_keys.label})\n", + "df_eye = pd.DataFrame({\"Timestamp\": df_eyes.Timestamp, \n", + " \"Gaze_X\": df_eyes.Gaze_X, \n", + " \"Gaze_Y\": df_eyes.Gaze_Y, \n", + " \"TaskID\": df_eyes.taskID,\n", + " \"Part\": df_eyes.part, \n", + " \"PID\": df_eyes.PID,\n", + " \"TextRule\": df_eyes.rules,\n", + " \"Rule\": df_eyes.label})\n", + "df_mouse = pd.DataFrame({\"Timestamp\": df_mouses.Timestamp, \n", + " \"X\": df_mouses.clientX, \n", + " \"Y\": df_mouses.clientY,\n", + " \"Type\": df_mouses.type, \n", + " \"TaskID\": df_mouses.taskID,\n", + " \"Part\": df_mouses.part, \n", + " \"PID\": df_mouses.PID,\n", + " \"TextRule\": df_mouses.rules,\n", + " \"Event\": df_mouses.event,\n", + " \"Rule\": df_mouses.label})\n", + "df_mousePos = pd.DataFrame({\"Timestamp\": df_mouse_pos.Timestamp, \n", + " \"X\": df_mouse_pos.clientX, \n", + " \"Y\": df_mouse_pos.clientY,\n", + " \"Type\": df_mouse_pos.type, 
\n", + " \"TaskID\": df_mouse_pos.taskID,\n", + " \"Part\": df_mouse_pos.part, \n", + " \"PID\": df_mouse_pos.PID,\n", + " \"TextRule\": df_mouse_pos.rules,\n", + " \"Rule\": df_mouse_pos.label})\n", + "df_cmd = pd.DataFrame({\"Timestamp\": df_cmds.Timestamp, \n", + " \"Event\": df_cmds.event, \n", + " \"TaskID\": df_cmds.taskID,\n", + " \"Part\": df_cmds.part, \n", + " \"PID\": df_cmds.PID,\n", + " \"TextRule\": df_cmds.rules,\n", + " \"Rule\": df_cmds.label})\n", + "df_toolbar = pd.DataFrame({\"Timestamp\": df_toolbars.Timestamp, \n", + " \"Event\": df_toolbars.event, \n", + " \"TaskID\": df_toolbars.taskID,\n", + " \"Part\": df_toolbars.part, \n", + " \"PID\": df_toolbars.PID,\n", + " \"TextRule\": df_toolbars.rules,\n", + " \"Rule\": df_toolbars.label})\n", + "# Keep PID, Part, TaskID, TextRule and Rule. Add Gaze and Pos\n", + "df_label = df_eye.groupby([\"PID\",\"Part\",\"TaskID\"]).count().reset_index().drop([\"Timestamp\", \"Gaze_X\", \"Gaze_Y\"], axis=1)\n", + "df_label[\"Gaze\"] = 0\n", + "df_label[\"Pos\"] = 0\n", + "#print(df_label)\n", + "\n", + "# Change keyboard values to uppercase\n", + "df_key.Key = df_key.Key.apply(lambda x: x.upper())" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "bcc70e36", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TimestampEventKeyTaskIDPartPIDTextRuleRule
911575388456995keydownESCAPE011{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
1158941575388801164keydownESCAPE511{'Title': ['Size', 'Big', 'and', 'Underline'],...1
1350361575388888708keydownESCAPE611{'Title': ['Font', 'Family', 'Consolas', 'and'...6
3031981575389414398keydownESCAPE621{'Title': ['Font', 'Family', 'Consolas', 'and'...6
3710581575389612671keydownESCAPE231{'Title': ['Alignment', 'Center', 'and', 'Unde...0
3886041575389680615keydownESCAPE331{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
4340691575389811028keydownESCAPE531{'Title': ['Size', 'Big', 'and', 'Underline'],...1
1483031575983493827keydownESCAPE612{'Title': ['Bold', 'and', 'Underline'], 'Subti...2
2096861575983764051keydownESCAPE222{'Title': ['Font', 'Family', 'Consolas', 'and'...6
2351551575983852406keydownESCAPE322{'Title': ['Size', 'Big', 'and', 'Underline'],...1
2616591575983923344keydownESCAPE422{'Title': ['Alignment', 'Center', 'and', 'Unde...0
3184271575984168751keydownESCAPE032{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
3394701575984281075keydownESCAPE132{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
3721641575984373613keydownESCAPE232{'Title': ['Font', 'Family', 'Consolas', 'and'...6
3967791575984444888keydownESCAPE332{'Title': ['Size', 'Big', 'and', 'Underline'],...1
4188121575984507738keydownESCAPE432{'Title': ['Alignment', 'Center', 'and', 'Unde...0
4563871575984630149keydownESCAPE632{'Title': ['Bold', 'and', 'Underline'], 'Subti...2
5222491575984892276keydownESCAPE242{'Title': ['Font', 'Family', 'Consolas', 'and'...6
6143901575985243792keydownESCAPE052{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
6504741575985354507keydownESCAPE252{'Title': ['Font', 'Family', 'Consolas', 'and'...6
6662611575985448687keydownESCAPE352{'Title': ['Size', 'Big', 'and', 'Underline'],...1
6942491575985497666keydownESCAPE452{'Title': ['Alignment', 'Center', 'and', 'Unde...0
7082791575985549757keydownESCAPE552{'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub...4
871576500820142keydownESCAPE013{'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub...4
186301576500895888keydownESCAPE113{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
1179341576501236105keydownESCAPE613{'Title': ['Font', 'Family', 'Consolas', 'and'...6
1377681576501303236keydownESCAPE023{'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub...4
1746301576501417687keydownESCAPE223{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
3283711576501894917keydownESCAPE433{'Title': ['Alignment', 'Center', 'and', 'Unde...0
3448821576501949469keydownESCAPE533{'Title': ['Size', 'Big', 'and', 'Underline'],...1
4786071576502364010keydownESCAPE643{'Title': ['Font', 'Family', 'Consolas', 'and'...6
5316421576502533703keydownESCAPE253{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
5856771576502711019keydownESCAPE553{'Title': ['Size', 'Big', 'and', 'Underline'],...1
528621576508583224keydownESCAPE214{'Title': ['Bold', 'and', 'Underline'], 'Subti...2
761821576508676858keydownESCAPE314{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
1147751576508829687keydownESCAPE514{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
1308201576508910256keydownESCAPE614{'Title': ['Font', 'Family', 'Consolas', 'and'...6
2087431576509229027keydownESCAPE324{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
2243671576509290491keydownESCAPE424{'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub...4
2717601576509510395keydownESCAPE034{'Title': ['Alignment', 'Center', 'and', 'Unde...0
2846621576509692400keydownESCAPE334{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
2886001576509746972keydownESCAPE434{'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub...4
2931061576509806032keydownESCAPE534{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
2990941576509876765keydownESCAPE634{'Title': ['Font', 'Family', 'Consolas', 'and'...6
3199721576510098343keydownESCAPE244{'Title': ['Bold', 'and', 'Underline'], 'Subti...2
3996721576510384286keydownESCAPE644{'Title': ['Font', 'Family', 'Consolas', 'and'...6
4735641576510632190keydownESCAPE354{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3
5112571576510763377keydownESCAPE554{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
2571602506765679keydownESCAPE015{'Title': ['Bold', 'and', 'Underline'], 'Subti...2
1045901603451016010keydownESCAPE5112{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
2461741603451554880keydownESCAPE5212{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
3840721603452064761keydownESCAPE5312{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5
5043471603460189671keydownESCAPE5413{'Title': ['Bold', 'and', 'Underline'], 'Subti...2
5161491603889725164keydownESCAPE6415{'Title': ['Font', 'Family', 'Consolas', 'and'...6
2591891603895855723keydownESCAPE4216{'Title': ['Alignment', 'Center', 'and', 'Unde...0
\n", + "
" + ], + "text/plain": [ + " Timestamp Event Key TaskID Part PID \\\n", + "91 1575388456995 keydown ESCAPE 0 1 1 \n", + "115894 1575388801164 keydown ESCAPE 5 1 1 \n", + "135036 1575388888708 keydown ESCAPE 6 1 1 \n", + "303198 1575389414398 keydown ESCAPE 6 2 1 \n", + "371058 1575389612671 keydown ESCAPE 2 3 1 \n", + "388604 1575389680615 keydown ESCAPE 3 3 1 \n", + "434069 1575389811028 keydown ESCAPE 5 3 1 \n", + "148303 1575983493827 keydown ESCAPE 6 1 2 \n", + "209686 1575983764051 keydown ESCAPE 2 2 2 \n", + "235155 1575983852406 keydown ESCAPE 3 2 2 \n", + "261659 1575983923344 keydown ESCAPE 4 2 2 \n", + "318427 1575984168751 keydown ESCAPE 0 3 2 \n", + "339470 1575984281075 keydown ESCAPE 1 3 2 \n", + "372164 1575984373613 keydown ESCAPE 2 3 2 \n", + "396779 1575984444888 keydown ESCAPE 3 3 2 \n", + "418812 1575984507738 keydown ESCAPE 4 3 2 \n", + "456387 1575984630149 keydown ESCAPE 6 3 2 \n", + "522249 1575984892276 keydown ESCAPE 2 4 2 \n", + "614390 1575985243792 keydown ESCAPE 0 5 2 \n", + "650474 1575985354507 keydown ESCAPE 2 5 2 \n", + "666261 1575985448687 keydown ESCAPE 3 5 2 \n", + "694249 1575985497666 keydown ESCAPE 4 5 2 \n", + "708279 1575985549757 keydown ESCAPE 5 5 2 \n", + "87 1576500820142 keydown ESCAPE 0 1 3 \n", + "18630 1576500895888 keydown ESCAPE 1 1 3 \n", + "117934 1576501236105 keydown ESCAPE 6 1 3 \n", + "137768 1576501303236 keydown ESCAPE 0 2 3 \n", + "174630 1576501417687 keydown ESCAPE 2 2 3 \n", + "328371 1576501894917 keydown ESCAPE 4 3 3 \n", + "344882 1576501949469 keydown ESCAPE 5 3 3 \n", + "478607 1576502364010 keydown ESCAPE 6 4 3 \n", + "531642 1576502533703 keydown ESCAPE 2 5 3 \n", + "585677 1576502711019 keydown ESCAPE 5 5 3 \n", + "52862 1576508583224 keydown ESCAPE 2 1 4 \n", + "76182 1576508676858 keydown ESCAPE 3 1 4 \n", + "114775 1576508829687 keydown ESCAPE 5 1 4 \n", + "130820 1576508910256 keydown ESCAPE 6 1 4 \n", + "208743 1576509229027 keydown ESCAPE 3 2 4 \n", + "224367 1576509290491 keydown ESCAPE 
4 2 4 \n", + "271760 1576509510395 keydown ESCAPE 0 3 4 \n", + "284662 1576509692400 keydown ESCAPE 3 3 4 \n", + "288600 1576509746972 keydown ESCAPE 4 3 4 \n", + "293106 1576509806032 keydown ESCAPE 5 3 4 \n", + "299094 1576509876765 keydown ESCAPE 6 3 4 \n", + "319972 1576510098343 keydown ESCAPE 2 4 4 \n", + "399672 1576510384286 keydown ESCAPE 6 4 4 \n", + "473564 1576510632190 keydown ESCAPE 3 5 4 \n", + "511257 1576510763377 keydown ESCAPE 5 5 4 \n", + "257 1602506765679 keydown ESCAPE 0 1 5 \n", + "104590 1603451016010 keydown ESCAPE 5 1 12 \n", + "246174 1603451554880 keydown ESCAPE 5 2 12 \n", + "384072 1603452064761 keydown ESCAPE 5 3 12 \n", + "504347 1603460189671 keydown ESCAPE 5 4 13 \n", + "516149 1603889725164 keydown ESCAPE 6 4 15 \n", + "259189 1603895855723 keydown ESCAPE 4 2 16 \n", + "\n", + " TextRule Rule \n", + "91 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "115894 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "135036 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "303198 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "371058 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "388604 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "434069 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "148303 {'Title': ['Bold', 'and', 'Underline'], 'Subti... 2 \n", + "209686 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "235155 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "261659 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "318427 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "339470 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "372164 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "396779 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "418812 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "456387 {'Title': ['Bold', 'and', 'Underline'], 'Subti... 
2 \n", + "522249 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "614390 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "650474 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "666261 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "694249 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "708279 {'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub... 4 \n", + "87 {'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub... 4 \n", + "18630 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "117934 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "137768 {'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub... 4 \n", + "174630 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "328371 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "344882 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "478607 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "531642 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "585677 {'Title': ['Size', 'Big', 'and', 'Underline'],... 1 \n", + "52862 {'Title': ['Bold', 'and', 'Underline'], 'Subti... 2 \n", + "76182 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "114775 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "130820 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "208743 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "224367 {'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub... 4 \n", + "271760 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 \n", + "284662 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "288600 {'Title': ['Size', 'Big', 'and', 'Bold'], 'Sub... 4 \n", + "293106 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "299094 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "319972 {'Title': ['Bold', 'and', 'Underline'], 'Subti... 2 \n", + "399672 {'Title': ['Font', 'Family', 'Consolas', 'and'... 
6 \n", + "473564 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3 \n", + "511257 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "257 {'Title': ['Bold', 'and', 'Underline'], 'Subti... 2 \n", + "104590 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "246174 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "384072 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5 \n", + "504347 {'Title': ['Bold', 'and', 'Underline'], 'Subti... 2 \n", + "516149 {'Title': ['Font', 'Family', 'Consolas', 'and'... 6 \n", + "259189 {'Title': ['Alignment', 'Center', 'and', 'Unde... 0 " + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# All events for 'ESCAPE' are keydown !!!!!!\n", + "df_key[df_key.Key=='ESCAPE']" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "a2934358", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ENTER keydown pre: 12480\n", + "ENTER keydown post: 12480\n", + "ENTER keyup pre: 12480\n", + "ENTER keyup post: 12480\n", + "TAB keydown pre: 2738\n", + "TAB keydown post: 2738\n", + "TAB keyup pre: 2738\n", + "TAB keyup post: 2738\n", + "CONTROL keydown pre: 8256\n", + "CONTROL keydown post: 5716\n", + "CONTROL keyup pre: 5716\n", + "CONTROL keyup post: 5712\n", + "I keydown pre: 1770\n", + "I keydown post: 1770\n", + "I keyup pre: 1770\n", + "I keyup post: 1770\n", + "Z keydown pre: 210\n", + "Z keydown post: 210\n", + "Z keyup pre: 210\n", + "Z keyup post: 210\n", + "CAPSLOCK keydown pre: 20\n", + "CAPSLOCK keydown post: 20\n", + "CAPSLOCK keyup pre: 20\n", + "CAPSLOCK keyup post: 20\n", + "SHIFT keydown pre: 394\n", + "SHIFT keydown post: 222\n", + "SHIFT keyup pre: 222\n", + "SHIFT keyup post: 222\n", + "ARROWRIGHT keydown pre: 642\n", + "ARROWRIGHT keydown post: 616\n", + "ARROWRIGHT keyup pre: 616\n", + "ARROWRIGHT keyup post: 616\n", + "ESCAPE keydown pre: 55\n", + "ESCAPE 
keydown post: 1\n", + "ESCAPE keyup pre: 1\n", + "ESCAPE keyup post: 1\n", + "B keydown pre: 1982\n", + "B keydown post: 1982\n", + "B keyup pre: 1982\n", + "B keyup post: 1982\n", + "U keydown pre: 1722\n", + "U keydown post: 1722\n", + "U keyup pre: 1722\n", + "U keyup post: 1722\n", + "ARROWLEFT keydown pre: 158\n", + "ARROWLEFT keydown post: 158\n", + "ARROWLEFT keyup pre: 158\n", + "ARROWLEFT keyup post: 158\n", + "UNIDENTIFIED keydown pre: 11\n", + "UNIDENTIFIED keydown post: 10\n", + "UNIDENTIFIED keyup pre: 10\n", + "UNIDENTIFIED keyup post: 10\n", + "Q keydown pre: 4\n", + "Q keydown post: 4\n", + "Q keyup pre: 4\n", + "Q keyup post: 4\n", + "BACKSPACE keydown pre: 3400\n", + "BACKSPACE keydown post: 3400\n", + "BACKSPACE keyup pre: 3400\n", + "BACKSPACE keyup post: 3400\n", + "HOME keydown pre: 30\n", + "HOME keydown post: 30\n", + "HOME keyup pre: 30\n", + "HOME keyup post: 30\n", + "DELETE keydown pre: 104\n", + "DELETE keydown post: 104\n", + "DELETE keyup pre: 104\n", + "DELETE keyup post: 104\n", + "ARROWUP keydown pre: 83\n", + "ARROWUP keydown post: 72\n", + "ARROWUP keyup pre: 72\n", + "ARROWUP keyup post: 72\n", + "C keydown pre: 6\n", + "C keydown post: 6\n", + "C keyup pre: 6\n", + "C keyup post: 6\n", + "+ keydown pre: 2\n", + "+ keydown post: 2\n", + "+ keyup pre: 2\n", + "+ keyup post: 2\n", + "ARROWDOWN keydown pre: 110\n", + "ARROWDOWN keydown post: 110\n", + "ARROWDOWN keyup pre: 110\n", + "ARROWDOWN keyup post: 110\n", + "END keydown pre: 8\n", + "END keydown post: 8\n", + "END keyup pre: 8\n", + "END keyup post: 8\n", + "A keydown pre: 20\n", + "A keydown post: 20\n", + "A keyup pre: 20\n", + "A keyup post: 20\n", + "N keydown pre: 4\n", + "N keydown post: 4\n", + "N keyup pre: 4\n", + "N keyup post: 4\n", + "V keydown pre: 18\n", + "V keydown post: 18\n", + "V keyup pre: 18\n", + "V keyup post: 18\n", + " keydown pre: 9\n", + " keydown post: 8\n", + " keyup pre: 8\n", + " keyup post: 8\n", + ". keydown pre: 32\n", + ". 
keydown post: 32\n", + ". keyup pre: 32\n", + ". keyup post: 32\n", + "S keydown pre: 10\n", + "S keydown post: 10\n", + "S keyup pre: 10\n", + "S keyup post: 10\n", + "ALT keydown pre: 29\n", + "ALT keydown post: 2\n", + "ALT keyup pre: 2\n", + "ALT keyup post: 2\n", + "D keydown pre: 8\n", + "D keydown post: 8\n", + "D keyup pre: 8\n", + "D keyup post: 8\n", + "E keydown pre: 2\n", + "E keydown post: 2\n", + "E keyup pre: 2\n", + "E keyup post: 2\n", + "DEAD keydown pre: 14\n", + "DEAD keydown post: 14\n", + "DEAD keyup pre: 14\n", + "DEAD keyup post: 14\n", + "H keydown pre: 4\n", + "H keydown post: 4\n", + "H keyup pre: 4\n", + "H keyup post: 4\n", + "Y keydown pre: 64\n", + "Y keydown post: 64\n", + "Y keyup pre: 64\n", + "Y keyup post: 64\n", + "; keydown pre: 2\n", + "; keydown post: 2\n", + "; keyup pre: 2\n", + "; keyup post: 2\n", + "Ö keydown pre: 4\n", + "Ö keydown post: 4\n", + "Ö keyup pre: 4\n", + "Ö keyup post: 4\n", + "7 keydown pre: 2\n", + "7 keydown post: 2\n", + "7 keyup pre: 2\n", + "7 keyup post: 2\n" + ] + } + ], + "source": [ + "# Consecutive multiple key up and down events -> only keep the first one\n", + "def detect_duplicate(row, dup, direction):\n", + " v = 1 if (row.Key == dup) & (row.Event == direction) else 0\n", + " return v\n", + "for dup in df_key.Key.unique():\n", + " for direction in [\"keydown\", \"keyup\"]:\n", + " df_key[\"Error_detection\"] = 0\n", + " df_key[\"Error_detection_id\"] = 0\n", + " # Mark all samples with dup as the value of key\n", + " df_key.loc[df_key.Key == dup, \"Error_detection\"] = df_key.loc[df_key.Key == dup].apply(lambda row: detect_duplicate(row, dup, direction), axis=1)\n", + " # Mark all samples after the first one in a consecutive segment\n", + " df_key.loc[df_key.Key == dup, \"Error_detection_id\"] = df_key.loc[df_key.Key == dup, \"Error_detection\"].diff()\n", + " # And remove them\n", + " print(dup , direction, \"pre: \", len(df_key.loc[df_key.Key == dup]))\n", + " df_key = df_key[~((df_key.Key 
== dup) & (df_key.Event == direction) & ( df_key.Error_detection_id == 0))]\n", + " print(dup , direction, \"post: \", len(df_key.loc[df_key.Key == dup]))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "20699e03", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ENTER down: 6240\n", + "ENTER up: 6240\n", + "ENTER\n", + "TAB down: 1369\n", + "TAB up: 1369\n", + "TAB\n", + "CONTROL down: 2856\n", + "CONTROL up: 2856\n", + "CONTROL\n", + "I down: 885\n", + "I up: 885\n", + "I\n", + "Z down: 105\n", + "Z up: 105\n", + "Z\n", + "CAPSLOCK down: 10\n", + "CAPSLOCK up: 10\n", + "CAPSLOCK\n", + "SHIFT down: 111\n", + "SHIFT up: 111\n", + "SHIFT\n", + "ARROWRIGHT down: 308\n", + "ARROWRIGHT up: 308\n", + "ARROWRIGHT\n", + "ESCAPE down: 1\n", + "ESCAPE up: 0\n", + "B down: 991\n", + "B up: 991\n", + "B\n", + "U down: 861\n", + "U up: 861\n", + "U\n", + "ARROWLEFT down: 79\n", + "ARROWLEFT up: 79\n", + "ARROWLEFT\n", + "UNIDENTIFIED down: 5\n", + "UNIDENTIFIED up: 5\n", + "UNIDENTIFIED\n", + "Q down: 2\n", + "Q up: 2\n", + "Q\n", + "BACKSPACE down: 1700\n", + "BACKSPACE up: 1700\n", + "BACKSPACE\n", + "HOME down: 15\n", + "HOME up: 15\n", + "HOME\n", + "DELETE down: 52\n", + "DELETE up: 52\n", + "DELETE\n", + "ARROWUP down: 36\n", + "ARROWUP up: 36\n", + "ARROWUP\n", + "C down: 3\n", + "C up: 3\n", + "C\n", + "+ down: 1\n", + "+ up: 1\n", + "+\n", + "ARROWDOWN down: 55\n", + "ARROWDOWN up: 55\n", + "ARROWDOWN\n", + "END down: 4\n", + "END up: 4\n", + "END\n", + "A down: 10\n", + "A up: 10\n", + "A\n", + "N down: 2\n", + "N up: 2\n", + "N\n", + "V down: 9\n", + "V up: 9\n", + "V\n", + " down: 4\n", + " up: 4\n", + " \n", + ". down: 16\n", + ". 
up: 16\n", + ".\n", + "S down: 5\n", + "S up: 5\n", + "S\n", + "ALT down: 1\n", + "ALT up: 1\n", + "ALT\n", + "D down: 4\n", + "D up: 4\n", + "D\n", + "E down: 1\n", + "E up: 1\n", + "E\n", + "DEAD down: 7\n", + "DEAD up: 7\n", + "DEAD\n", + "H down: 2\n", + "H up: 2\n", + "H\n", + "Y down: 32\n", + "Y up: 32\n", + "Y\n", + "; down: 1\n", + "; up: 1\n", + ";\n", + "Ö down: 2\n", + "Ö up: 2\n", + "Ö\n", + "7 down: 1\n", + "7 up: 1\n", + "7\n" + ] + } + ], + "source": [ + "df_key.reset_index(drop=True, inplace=True)\n", + "\n", + "for k in df_key.Key.unique():\n", + " k_downs = df_key[(df_key.Key == k) & (df_key.Event == \"keydown\")].index.tolist()\n", + " k_ups = df_key[(df_key.Key == k) & (df_key.Event == \"keyup\")].index.tolist()\n", + " print(k, \"down: \" ,len(k_downs))\n", + " print(k, \"up: \",len(k_ups))\n", + " if k!='ESCAPE':\n", + " print(k)\n", + " assert len(k_downs)==len(k_ups)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "62a1ca46", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. 
Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True)\n", + "/tmp/ipykernel_625665/1936093847.py:17: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n", + " df_cmd = df_cmd.append(df_cmd_u)\n" + ] + } + ], + "source": [ + "# append ctrl+u to df_cmd as it is not registered\n", + "Ctrl_downs = df_key[(df_key.Key == \"CONTROL\") & (df_key.Event == \"keydown\")].index.tolist()\n", + "Ctrl_ups = df_key[(df_key.Key == \"CONTROL\") & (df_key.Event == \"keyup\")].index.tolist()\n", + "df_cmd_u = pd.DataFrame()\n", + "for s,e in zip(Ctrl_downs, Ctrl_ups):\n", + " tmp = df_key.loc[s:e]\n", + " tmp = tmp[(tmp.Event == \"keydown\") & (tmp.Key == \"U\")]\n", + " if (len(tmp) > 0):\n", + " tmp_dict = {\"Timestamp\": tmp.Timestamp.iloc[0], \n", + " \"Event\": \"Ctrl+U\", \n", + " \"TaskID\": tmp.TaskID.iloc[0], \n", + " \"Part\": tmp.Part.iloc[0], \n", + " \"PID\":tmp.PID.iloc[0], \n", + " \"TextRule\":tmp.TextRule.iloc[0], \n", + " \"Rule\":tmp.Rule.iloc[0]}\n", + " df_cmd_u = df_cmd_u.append(tmp_dict, ignore_index=True) \n", + "df_cmd = df_cmd.append(df_cmd_u)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "a84563ae", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TimestampTaskIDPartPIDTextRuleRule
Event
Ctrl+7111111
Ctrl+A888888
Ctrl+B991991991991991991
Ctrl+C111111
Ctrl+H222222
Ctrl+I884884884884884884
Ctrl+N111111
Ctrl+S222222
Ctrl+Tab111111
Ctrl+U854854854854854854
Ctrl+V777777
Ctrl+Y323232323232
Ctrl+Z104104104104104104
Esc575757575757
Shift+Tab151515151515
Tab135313531353135313531353
\n", + "
" + ], + "text/plain": [ + " Timestamp TaskID Part PID TextRule Rule\n", + "Event \n", + "Ctrl+7 1 1 1 1 1 1\n", + "Ctrl+A 8 8 8 8 8 8\n", + "Ctrl+B 991 991 991 991 991 991\n", + "Ctrl+C 1 1 1 1 1 1\n", + "Ctrl+H 2 2 2 2 2 2\n", + "Ctrl+I 884 884 884 884 884 884\n", + "Ctrl+N 1 1 1 1 1 1\n", + "Ctrl+S 2 2 2 2 2 2\n", + "Ctrl+Tab 1 1 1 1 1 1\n", + "Ctrl+U 854 854 854 854 854 854\n", + "Ctrl+V 7 7 7 7 7 7\n", + "Ctrl+Y 32 32 32 32 32 32\n", + "Ctrl+Z 104 104 104 104 104 104\n", + "Esc 57 57 57 57 57 57\n", + "Shift+Tab 15 15 15 15 15 15\n", + "Tab 1353 1353 1353 1353 1353 1353" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_cmd.groupby([\"Event\"]).count()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "ee86dcb7", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array(['Tab', 'Ctrl+I', 'Ctrl+Z', 'Esc', 'Ctrl+B', 'Ctrl+C', 'Shift+Tab',\n", + " 'Ctrl+A', 'Ctrl+N', 'Ctrl+V', 'Ctrl+H', 'Ctrl+Y', 'Ctrl+S',\n", + " 'Ctrl+Tab', 'Ctrl+7', 'Ctrl+U'], dtype=object)" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_cmd.Event.unique()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "109e2046", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[' undefined',\n", + " 'Align centerAlign',\n", + " 'Align leftAlign',\n", + " 'Align rightAlign',\n", + " 'Bold (CTRL+B)Bold',\n", + " 'Font FamilyFont',\n", + " 'Font SizeFont',\n", + " 'Italic (CTRL+I)Italic',\n", + " 'Underline (CTRL+U)Underline']" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_toolbar = df_toolbar[~df_toolbar.Event.isna()]\n", + "toolbar_array = sorted(df_toolbar.Event.unique())\n", + "toolbar_array" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "5155e937", + "metadata": { + "scrolled": true 
+ }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|████████████████████████████████████████████| 3058/3058 [20:03<00:00, 2.55it/s]\n" + ] + } + ], + "source": [ + "#if not os.path.exists(\"../ReRun/df_mouse.pkl\"):\n", + "if not os.path.exists(\"df_mouse.pkl\"):\n", + " df_mouse[\"Keep\"] = 0\n", + " df_toolbar[\"X\"] = 0\n", + " df_toolbar[\"Y\"] = 0\n", + " # Font Family: all timestamps of event FamilyFont should have ONE corresponding (within 20ms) mouse click event\n", + " for time in tqdm(df_toolbar[df_toolbar.Event == \"Font FamilyFont\"].Timestamp.values):\n", + " assert len(df_mouse[(df_mouse.Event == \"up\") & (df_mouse.Type == \"click\") & (df_mouse.Timestamp <= time + 20) & (df_mouse.Timestamp >= time - 20)]) == 1 \n", + " df_toolbar.loc[(df_toolbar.Timestamp == time) & (df_toolbar.Event == \"Font FamilyFont\"), \"X\"] = df_mouse[(df_mouse.Event == \"up\") & (df_mouse.Type == \"click\") & (df_mouse.Timestamp <= time + 20) & (df_mouse.Timestamp >= time - 20)].X.iloc[0]\n", + " df_toolbar.loc[(df_toolbar.Timestamp == time) & (df_toolbar.Event == \"Font FamilyFont\"), \"Y\"] = df_mouse[(df_mouse.Event == \"up\") & (df_mouse.Type == \"click\") & (df_mouse.Timestamp <= time + 20) & (df_mouse.Timestamp >= time - 20)].Y.iloc[0]\n", + " df_mouse.loc[(df_mouse.Event == \"up\") & (df_mouse.Type == \"click\") & (df_mouse.Timestamp <= time + 20) & (df_mouse.Timestamp >= time - 20), \"Keep\"] = 1\n", + " #df_mouse.to_pickle(\"../ReRun/df_mouse.pkl\")\n", + " #df_toolbar.to_pickle(\"../ReRun/df_toolbar.pkl\")\n", + " df_mouse.to_pickle(\"df_mouse.pkl\")\n", + " df_toolbar.to_pickle(\"df_toolbar.pkl\")\n", + "else:\n", + " #df_mouse = pd.read_pickle(\"../ReRun/df_mouse.pkl\")\n", + " #df_toolbar = pd.read_pickle(\"../ReRun/df_toolbar.pkl\")\n", + " df_mouse = pd.read_pickle(\"df_mouse.pkl\")\n", + " df_toolbar = pd.read_pickle(\"df_toolbar.pkl\")" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "d97268bc", + 
"metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"'found = False\\nindexes = []\\nfor i, l in enumerate(df_toolbar.Event.tolist() + ['ENDE']):\\n #print(i, l, found)\\n if l in 'Font FamilyFont':\\n ind = i\\n found = True\\n elif l not in 'Font FamilyFont' and found:\\n indexes.append(ind)\\n found = False\\n elif l in 'Font FamilyFont' and found:\\n indexes.append(i)\\nprint(indexes)\\nlen(indexes)\"" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_toolbar.reset_index(drop=True, inplace=True)\n", + "# Find the last \"Font FamilyFont\" in consecutive multiple \"Font FamilyFont\"s\n", + "''''found = False\n", + "indexes = []\n", + "for i, l in enumerate(df_toolbar.Event.tolist() + ['ENDE']):\n", + " #print(i, l, found)\n", + " if l in 'Font FamilyFont':\n", + " ind = i\n", + " found = True\n", + " elif l not in 'Font FamilyFont' and found:\n", + " indexes.append(ind)\n", + " found = False\n", + " elif l in 'Font FamilyFont' and found:\n", + " indexes.append(i)\n", + "print(indexes)\n", + "len(indexes)'''" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "37af6fc7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['Ctrl+7', 'Ctrl+A', 'Ctrl+B', 'Ctrl+C', 'Ctrl+H', 'Ctrl+I', 'Ctrl+N', 'Ctrl+S', 'Ctrl+Tab', 'Ctrl+U', 'Ctrl+V', 'Ctrl+Y', 'Ctrl+Z', 'Esc', 'Shift+Tab', 'Tab']\n", + "cmd_dict\n", + " {'Ctrl+7': 'bold', 'Ctrl+A': 'italic', 'Ctrl+B': 'underline', 'Ctrl+C': 'indent', 'Ctrl+H': 'indent'}\n", + "toolbar_dict\n", + " {' undefined': 'indent', 'Align centerAlign': 'align', 'Align leftAlign': 'align', 'Align rightAlign': 'align', 'Bold (CTRL+B)Bold': 'bold', 'Font FamilyFont': 'fontFamily', 'Font SizeFont': 'fontSize', 'Italic (CTRL+I)Italic': 'italic', 'Underline (CTRL+U)Underline': 'underline'}\n" + ] + } + ], + "source": [ + "cmd_array = sorted(df_cmd.Event.unique())\n", + "print(cmd_array)\n", + 
"#actions_array = [\"Italic\", \"Bold\", \"Underline\", \"Indent\", \"Align\", \"FontSize\", \"FontFamily\"]\n", + "actions_array = [\"italic\", \"bold\", \"underline\", \"indent\", \"align\", \"fontSize\", \"fontFamily\"]\n", + "toolbar_actions = [actions_array[3], actions_array[4], actions_array[4], actions_array[4], actions_array[1], actions_array[6], actions_array[5], actions_array[0], actions_array[2]]\n", + "cmd_actions = [actions_array[1], actions_array[0], actions_array[2], actions_array[3], actions_array[3]]\n", + "cmd_dict = dict(zip(cmd_array, cmd_actions))\n", + "toolbar_dict = dict(zip(toolbar_array, toolbar_actions))\n", + "print(\"cmd_dict\\n\",cmd_dict)\n", + "print(\"toolbar_dict\\n\",toolbar_dict)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "69b2b9f5", + "metadata": {}, + "outputs": [ + { + "ename": "KeyError", + "evalue": "'Italic'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[24], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m df_toolbar\u001b[38;5;241m.\u001b[39mEvent \u001b[38;5;241m=\u001b[39m \u001b[43mdf_toolbar\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mEvent\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mx\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtoolbar_dict\u001b[49m\u001b[43m[\u001b[49m\u001b[43mx\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/core/series.py:4433\u001b[0m, in \u001b[0;36mSeries.apply\u001b[0;34m(self, func, convert_dtype, args, **kwargs)\u001b[0m\n\u001b[1;32m 4323\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\n\u001b[1;32m 4324\u001b[0m 
\u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 4325\u001b[0m func: AggFuncType,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 4328\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 4329\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m DataFrame \u001b[38;5;241m|\u001b[39m Series:\n\u001b[1;32m 4330\u001b[0m \u001b[38;5;124m\"\"\"\u001b[39m\n\u001b[1;32m 4331\u001b[0m \u001b[38;5;124m Invoke function on values of Series.\u001b[39m\n\u001b[1;32m 4332\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 4431\u001b[0m \u001b[38;5;124m dtype: float64\u001b[39m\n\u001b[1;32m 4432\u001b[0m \u001b[38;5;124m \"\"\"\u001b[39m\n\u001b[0;32m-> 4433\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mSeriesApply\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconvert_dtype\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/core/apply.py:1088\u001b[0m, in \u001b[0;36mSeriesApply.apply\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1084\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mf, \u001b[38;5;28mstr\u001b[39m):\n\u001b[1;32m 1085\u001b[0m \u001b[38;5;66m# if we are a string, try to dispatch\u001b[39m\n\u001b[1;32m 1086\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapply_str()\n\u001b[0;32m-> 1088\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply_standard\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/core/apply.py:1143\u001b[0m, in \u001b[0;36mSeriesApply.apply_standard\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1137\u001b[0m values \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39mastype(\u001b[38;5;28mobject\u001b[39m)\u001b[38;5;241m.\u001b[39m_values\n\u001b[1;32m 1138\u001b[0m \u001b[38;5;66m# error: Argument 2 to \"map_infer\" has incompatible type\u001b[39m\n\u001b[1;32m 1139\u001b[0m \u001b[38;5;66m# \"Union[Callable[..., Any], str, List[Union[Callable[..., Any], str]],\u001b[39m\n\u001b[1;32m 1140\u001b[0m \u001b[38;5;66m# Dict[Hashable, Union[Union[Callable[..., Any], str],\u001b[39m\n\u001b[1;32m 1141\u001b[0m \u001b[38;5;66m# List[Union[Callable[..., Any], str]]]]]\"; expected\u001b[39m\n\u001b[1;32m 1142\u001b[0m \u001b[38;5;66m# \"Callable[[Any], Any]\"\u001b[39m\n\u001b[0;32m-> 1143\u001b[0m mapped \u001b[38;5;241m=\u001b[39m \u001b[43mlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmap_infer\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1144\u001b[0m \u001b[43m \u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1145\u001b[0m \u001b[43m \u001b[49m\u001b[43mf\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43m# type: ignore[arg-type]\u001b[39;49m\n\u001b[1;32m 1146\u001b[0m \u001b[43m \u001b[49m\u001b[43mconvert\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconvert_dtype\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1147\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1149\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(mapped) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(mapped[\u001b[38;5;241m0\u001b[39m], ABCSeries):\n\u001b[1;32m 1150\u001b[0m 
\u001b[38;5;66m# GH#43986 Need to do list(mapped) in order to get treated as nested\u001b[39m\n\u001b[1;32m 1151\u001b[0m \u001b[38;5;66m# See also GH#25959 regarding EA support\u001b[39m\n\u001b[1;32m 1152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m obj\u001b[38;5;241m.\u001b[39m_constructor_expanddim(\u001b[38;5;28mlist\u001b[39m(mapped), index\u001b[38;5;241m=\u001b[39mobj\u001b[38;5;241m.\u001b[39mindex)\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/_libs/lib.pyx:2870\u001b[0m, in \u001b[0;36mpandas._libs.lib.map_infer\u001b[0;34m()\u001b[0m\n", + "Cell \u001b[0;32mIn[24], line 1\u001b[0m, in \u001b[0;36m\u001b[0;34m(x)\u001b[0m\n\u001b[0;32m----> 1\u001b[0m df_toolbar\u001b[38;5;241m.\u001b[39mEvent \u001b[38;5;241m=\u001b[39m df_toolbar\u001b[38;5;241m.\u001b[39mEvent\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: \u001b[43mtoolbar_dict\u001b[49m\u001b[43m[\u001b[49m\u001b[43mx\u001b[49m\u001b[43m]\u001b[49m)\n", + "\u001b[0;31mKeyError\u001b[0m: 'Italic'" + ] + } + ], + "source": [ + "df_toolbar.Event = df_toolbar.Event.apply(lambda x: toolbar_dict[x], axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "5fc17226", + "metadata": {}, + "outputs": [ + { + "ename": "KeyError", + "evalue": "'Tab'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[25], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m df_cmd\u001b[38;5;241m.\u001b[39mEvent \u001b[38;5;241m=\u001b[39m \u001b[43mdf_cmd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mEvent\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mx\u001b[49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mcmd_dict\u001b[49m\u001b[43m[\u001b[49m\u001b[43mx\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/core/series.py:4433\u001b[0m, in \u001b[0;36mSeries.apply\u001b[0;34m(self, func, convert_dtype, args, **kwargs)\u001b[0m\n\u001b[1;32m 4323\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapply\u001b[39m(\n\u001b[1;32m 4324\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 4325\u001b[0m func: AggFuncType,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 4328\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 4329\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m DataFrame \u001b[38;5;241m|\u001b[39m Series:\n\u001b[1;32m 4330\u001b[0m \u001b[38;5;124m\"\"\"\u001b[39m\n\u001b[1;32m 4331\u001b[0m \u001b[38;5;124m Invoke function on values of Series.\u001b[39m\n\u001b[1;32m 4332\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 4431\u001b[0m \u001b[38;5;124m dtype: float64\u001b[39m\n\u001b[1;32m 4432\u001b[0m \u001b[38;5;124m \"\"\"\u001b[39m\n\u001b[0;32m-> 4433\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mSeriesApply\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconvert_dtype\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/core/apply.py:1088\u001b[0m, in \u001b[0;36mSeriesApply.apply\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1084\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mf, 
\u001b[38;5;28mstr\u001b[39m):\n\u001b[1;32m 1085\u001b[0m \u001b[38;5;66m# if we are a string, try to dispatch\u001b[39m\n\u001b[1;32m 1086\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapply_str()\n\u001b[0;32m-> 1088\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply_standard\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/core/apply.py:1143\u001b[0m, in \u001b[0;36mSeriesApply.apply_standard\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1137\u001b[0m values \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39mastype(\u001b[38;5;28mobject\u001b[39m)\u001b[38;5;241m.\u001b[39m_values\n\u001b[1;32m 1138\u001b[0m \u001b[38;5;66m# error: Argument 2 to \"map_infer\" has incompatible type\u001b[39m\n\u001b[1;32m 1139\u001b[0m \u001b[38;5;66m# \"Union[Callable[..., Any], str, List[Union[Callable[..., Any], str]],\u001b[39m\n\u001b[1;32m 1140\u001b[0m \u001b[38;5;66m# Dict[Hashable, Union[Union[Callable[..., Any], str],\u001b[39m\n\u001b[1;32m 1141\u001b[0m \u001b[38;5;66m# List[Union[Callable[..., Any], str]]]]]\"; expected\u001b[39m\n\u001b[1;32m 1142\u001b[0m \u001b[38;5;66m# \"Callable[[Any], Any]\"\u001b[39m\n\u001b[0;32m-> 1143\u001b[0m mapped \u001b[38;5;241m=\u001b[39m \u001b[43mlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmap_infer\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1144\u001b[0m \u001b[43m \u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1145\u001b[0m \u001b[43m \u001b[49m\u001b[43mf\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43m# type: ignore[arg-type]\u001b[39;49m\n\u001b[1;32m 1146\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mconvert\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconvert_dtype\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1147\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1149\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(mapped) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(mapped[\u001b[38;5;241m0\u001b[39m], ABCSeries):\n\u001b[1;32m 1150\u001b[0m \u001b[38;5;66m# GH#43986 Need to do list(mapped) in order to get treated as nested\u001b[39m\n\u001b[1;32m 1151\u001b[0m \u001b[38;5;66m# See also GH#25959 regarding EA support\u001b[39m\n\u001b[1;32m 1152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m obj\u001b[38;5;241m.\u001b[39m_constructor_expanddim(\u001b[38;5;28mlist\u001b[39m(mapped), index\u001b[38;5;241m=\u001b[39mobj\u001b[38;5;241m.\u001b[39mindex)\n", + "File \u001b[0;32m~/.local/lib/python3.8/site-packages/pandas/_libs/lib.pyx:2870\u001b[0m, in \u001b[0;36mpandas._libs.lib.map_infer\u001b[0;34m()\u001b[0m\n", + "Cell \u001b[0;32mIn[25], line 1\u001b[0m, in \u001b[0;36m\u001b[0;34m(x)\u001b[0m\n\u001b[0;32m----> 1\u001b[0m df_cmd\u001b[38;5;241m.\u001b[39mEvent \u001b[38;5;241m=\u001b[39m df_cmd\u001b[38;5;241m.\u001b[39mEvent\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: \u001b[43mcmd_dict\u001b[49m\u001b[43m[\u001b[49m\u001b[43mx\u001b[49m\u001b[43m]\u001b[49m)\n", + "\u001b[0;31mKeyError\u001b[0m: 'Tab'" + ] + } + ], + "source": [ + "df_cmd.Event = df_cmd.Event.apply(lambda x: cmd_dict[x], axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3506f41b", + "metadata": {}, + "outputs": [], + "source": [ + "df_toolbar[\"Type\"] = \"Toolbar\"\n", + "df_cmd[\"Type\"] = \"Cmd\"\n", + "df_actions = pd.concat([df_toolbar, df_cmd])\n", + "df_actions = df_actions.sort_values(by=\"Timestamp\")\n", + "df_actions.head()" + ] + }, + { + "cell_type": "code", 
+ "execution_count": null, + "id": "c286b2f0", + "metadata": {}, + "outputs": [], + "source": [ + "actions_array_num = np.arange(1,len(actions_array)+1)\n", + "action_dict = dict(zip(actions_array,actions_array_num))\n", + "action_dict" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c74aae24", + "metadata": {}, + "outputs": [], + "source": [ + "df_actions.Event = df_actions.Event.apply(lambda x: action_dict[x])\n", + "#df_actions.reset_index(drop=True).to_pickle(study_data_path + \"../ReRun/clean_data.pkl\")\n", + "df_actions.reset_index(drop=True).to_pickle(\"clean_data.pkl\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8fc2f38e", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/keyboard_and_mouse/dataset/.ipynb_checkpoints/02-RandomForest-checkpoint.ipynb b/keyboard_and_mouse/dataset/.ipynb_checkpoints/02-RandomForest-checkpoint.ipynb new file mode 100644 index 0000000..175a493 --- /dev/null +++ b/keyboard_and_mouse/dataset/.ipynb_checkpoints/02-RandomForest-checkpoint.ipynb @@ -0,0 +1,852 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "f5cb2ecf", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-09-28 16:10:28.497166: I tensorflow/stream_executor/platform/default/dso_loader.cc:49] Successfully opened dynamic library libcudart.so.10.1\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import os, pdb\n", + "from sklearn.model_selection import 
GridSearchCV \n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.metrics import accuracy_score\n", + "from tensorflow import keras\n", + "from keras.preprocessing.sequence import pad_sequences" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "459ad77b", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "study_data_path = \"../IntentData/\"\n", + "data = pd.read_pickle(study_data_path + \"/Preprocessing_data/clean_data.pkl\")\n", + "Task_IDs = np.arange(7).tolist()\n", + "StartIndexOffset = 0 #if set to 5 ignore first 5 elements\n", + "EndIndexOffset = 0 #if set to 5 ignore last 5 elements" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4ab8c0cc", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array(['Cmd', 'Toolbar'], dtype=object)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data.Rule.unique()\n", + "data.columns\n", + "data.Type.unique()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "05550387", + "metadata": {}, + "outputs": [], + "source": [ + "# grouping by part is needed to have one ruleset for the whole part\n", + "# Participant [1,16]\n", + "# Repeat for 5 times [1,5]\n", + "# ???????? 
[0,6]\n", + "g = data.groupby([\"PID\", \"Part\", \"TaskID\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "7819da48", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "param_grid = {'n_estimators': [10,50,100], \n", + " 'max_depth': [10,20,30]}\n", + "\n", + "grid = GridSearchCV(RandomForestClassifier(), param_grid, refit = True, verbose = 0, return_train_score=True) " + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "e64a6920", + "metadata": {}, + "outputs": [], + "source": [ + "def createTrainTest(test_IDs, task_IDs, start_index_offset, end_index_offset, shapes=False):\n", + " assert isinstance(test_IDs, list)\n", + " assert isinstance(task_IDs, list)\n", + " # Fill data arrays\n", + " y_train = []\n", + " x_train = []\n", + " y_test = []\n", + " x_test = []\n", + " for current in g.groups.keys():\n", + " c = g.get_group(current)\n", + " if (c.TaskID.isin(task_IDs).all()):\n", + " new_rule = c.Rule.unique()[0]\n", + " if end_index_offset == 0:\n", + " new_data = c.Event.values[start_index_offset:]\n", + " else:\n", + " new_data = c.Event.values[start_index_offset:-end_index_offset]\n", + " if (c.PID.isin(test_IDs).all()):\n", + " y_test.append(new_rule)\n", + " x_test.append(new_data)\n", + " else:\n", + " y_train.append(new_rule)\n", + " x_train.append(new_data)\n", + " x_train = np.array(x_train)\n", + " y_train = np.array(y_train)\n", + " x_test = np.array(x_test)\n", + " y_test = np.array(y_test)\n", + " print('x_train\\n',x_train)\n", + " print('y_train\\n',y_train)\n", + " print('x_test\\n',x_test)\n", + " print('y_test\\n',y_test)\n", + " pdb.set_trace()\n", + " if (shapes):\n", + " print(x_train.shape)\n", + " print(y_train.shape)\n", + " print(x_test.shape)\n", + " print(y_test.shape)\n", + " print(np.unique(y_test))\n", + " print(np.unique(y_train))\n", + " return (x_train, y_train, x_test, y_test)\n", + "\n", + "def runSVMS(train_test, maxlen=None, plots=False, 
last_elements=False):\n", + " x_train, y_train, x_test, y_test = train_test\n", + " # Get maxlen to pad and pad\n", + " if (maxlen==None):\n", + " maxlen = 0\n", + " for d in np.concatenate((x_train,x_test)):\n", + " if len(d) > maxlen:\n", + " maxlen = len(d)\n", + " \n", + " truncating_elements = \"post\"\n", + " if last_elements:\n", + " truncating_elements = \"pre\"\n", + "\n", + " x_train = keras.preprocessing.sequence.pad_sequences(x_train, maxlen=maxlen, dtype='int32', padding='post', truncating=truncating_elements, value=0)\n", + " x_test = keras.preprocessing.sequence.pad_sequences(x_test, maxlen=maxlen, dtype='int32', padding='post', truncating=truncating_elements, value=0)\n", + "\n", + " # fitting the model for grid search \n", + " grid.fit(x_train, y_train) \n", + "\n", + " # print how our model looks after hyper-parameter tuning\n", + " if (plots==True):\n", + " print(grid.best_estimator_) \n", + "\n", + " # Predict with best SVM\n", + " pred = grid.predict(x_test)\n", + "\n", + " return accuracy_score(pred, y_test), pred, y_test " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "50dac7db", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_97850/1264473745.py:23: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.\n", + " x_train = np.array(x_train)\n", + "/tmp/ipykernel_97850/1264473745.py:25: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. 
If you meant to do this, you must specify 'dtype=object' when creating the ndarray.\n", + " x_test = np.array(x_test)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "x_train\n", + " [array([2, 7, 7, 7, 7, 7, 7, 2, 6, 6, 6, 2, 2, 2])\n", + " array([4, 1, 4, 1, 1, 1, 1, 1, 7, 7, 7, 7, 7, 7])\n", + " array([5, 7, 5, 7, 5, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1])\n", + " array([3, 3, 3, 3, 3, 3, 6, 6, 5, 5, 5, 5, 5, 5, 5])\n", + " array([5, 3, 5, 3, 3, 5, 3, 5, 3, 3, 4, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 6, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 3, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4, 4])\n", + " array([6, 6, 6, 2, 2, 2, 7, 7, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 7, 4, 1, 1, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 5, 7, 7, 5, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 3, 6, 3, 6, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([3, 5, 3, 4, 3, 5, 3, 3, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([2, 3, 3, 4, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 6, 6, 2, 2, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 7, 4, 1, 4, 1, 7, 7, 7, 7, 7, 4, 4])\n", + " array([5, 7, 7, 1, 5, 7, 7, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 3, 6, 3, 3, 3, 5, 5, 5, 5, 5])\n", + " array([5, 3, 3, 4, 3, 5, 3, 5, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 6, 6, 6, 2, 2, 7, 2, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 7, 4, 1, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([5, 7, 7, 1, 5, 7, 5, 7, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 2, 2, 6, 3, 3, 6, 3, 3, 4, 4, 4, 4, 4, 4])\n", + " array([5, 3, 3, 4, 3, 5, 3, 5, 3, 3, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 1, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 3, 2, 3, 2, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 6, 6, 2, 2, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 7, 4, 1, 4, 4, 
4, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 7, 5, 7, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 3, 6, 6, 3, 6, 3, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 3])\n", + " array([3, 5, 3, 4, 3, 5, 3, 5, 3, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 2, 6, 2, 2, 6, 2, 1, 1, 1, 1, 1, 1])\n", + " array([6, 2, 2, 2, 2, 6, 7, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 7, 1, 4, 1, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 3, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 3, 3, 5, 3, 5, 3, 5, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([5, 7, 7, 1, 7, 7, 7, 7, 5, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 6, 1, 1, 1, 1, 1, 1, 1])\n", + " array([6, 2, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 7, 1, 4, 1, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([3, 2, 2, 4, 2, 2, 3, 2, 2, 3, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 3, 3, 5, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 3, 3, 6, 3, 6, 5, 5, 5, 5, 5])\n", + " array([7, 5, 5, 5, 7, 1, 7, 7, 5, 7, 5, 7, 1, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 2, 2, 6, 2, 1, 1, 1, 1, 1])\n", + " array([6, 2, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 7, 1, 4, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 3, 3, 3, 3, 5, 5, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 6, 3, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([5, 7, 7, 1, 5, 7, 7, 7, 5, 7, 5, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 2, 2, 6, 2, 2, 6, 1, 1, 1, 1, 1])\n", + " array([6, 2, 7, 2, 2, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 7, 4, 4, 4, 4, 1, 1, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 3, 5, 3, 3, 3, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 3, 6, 3, 3, 5, 5, 5, 5, 5])\n", + " array([7, 
5, 7, 1, 7, 7, 7, 5, 7, 5, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 2, 2, 2, 6, 6, 6, 1, 1, 1, 1, 1])\n", + " array([6, 2, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 7, 1, 4, 1, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 2, 3, 2, 2, 3, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 3, 5, 3, 3, 3, 5, 3, 4, 4, 4, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 6, 3, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 1, 7, 7, 5, 5, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 5, 3, 4, 4, 4, 3, 5, 3, 5, 5, 3, 3])\n", + " array([6, 3, 5, 5, 5, 5, 5, 5, 6, 3, 6, 3, 3, 3, 3])\n", + " array([2, 2, 3, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 2, 3])\n", + " array([1, 7, 1, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 1, 1, 1, 6, 2, 1, 2, 6, 1, 2, 1])\n", + " array([6, 2, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 7, 7, 1, 1, 1, 1, 1, 1, 5, 7, 5, 7])\n", + " array([3, 3, 5, 3, 3, 5, 3, 3, 3, 3, 5])\n", + " array([2, 3, 3, 5, 3, 3, 3, 3, 2]) array([2, 3, 2, 2, 2, 2, 2, 3])\n", + " array([1, 7, 1, 1, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([6, 2, 6, 7, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 5, 7, 7, 5, 7, 7, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 3, 5, 3, 5, 3, 5, 3])\n", + " array([6, 3, 3, 5, 6, 3, 3, 6, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 2, 2, 3, 2, 3, 2]) array([1, 7, 1, 1, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 1, 2, 2, 2, 2, 6, 1, 1, 1, 1, 1])\n", + " array([6, 2, 7, 7, 7, 2, 2, 7, 6, 7, 7, 2])\n", + " array([5, 7, 7, 1, 1, 7, 5, 7, 1, 1, 7, 1, 7, 1])\n", + " array([3, 5, 3, 3, 5, 3, 5, 3, 5, 3])\n", + " array([3, 6, 3, 5, 3, 6, 6, 3, 3, 6, 3, 6, 5, 5, 5, 5, 5, 6, 6, 6])\n", + " array([2, 2, 3, 2, 2, 3, 2, 2, 3]) array([1, 7, 7, 1, 7, 7, 7, 7, 1])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 6, 2, 1, 1, 1, 1, 1])\n", + " array([6, 2, 7, 6, 6, 2, 2, 2, 2, 7, 7, 4, 7, 7, 7])\n", + " array([7, 7, 5, 1, 7, 5, 7, 5, 7, 7, 1, 1, 1, 1, 1, 7])\n", + " array([3, 5, 3, 5, 3, 3, 5, 
3, 3])\n", + " array([3, 6, 3, 5, 3, 3, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 2, 2, 2, 2, 3]) array([1, 7, 1, 1, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 1, 1, 1, 1, 1, 1, 1, 2, 6, 6, 2, 2, 6, 2])\n", + " array([6, 2, 7, 6, 6, 6, 6, 2, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 7, 7, 5, 7, 5, 7, 5])\n", + " array([2, 3, 2, 4, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([4, 1, 4, 4, 4, 4, 4, 7, 4, 1, 4, 1, 4, 1, 4, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 5, 7, 5, 7, 7, 7])\n", + " array([6, 2, 2, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 6])\n", + " array([4, 5, 3, 3, 3, 3, 3, 3, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([6, 6, 2, 7, 7, 7, 7, 7, 7, 6, 2, 2, 2])\n", + " array([6, 3, 3, 5, 5, 5, 5, 5, 5, 3, 6, 3, 3, 6, 3])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 3, 2, 4, 4, 4, 4, 4])\n", + " array([4, 1, 4, 7, 4, 4, 4, 1, 4, 1, 7, 7, 7, 7, 7])\n", + " array([1, 5, 7, 7, 1, 7, 7, 7, 7, 5, 5])\n", + " array([6, 2, 2, 1, 1, 2, 1, 2, 6, 1, 2, 1, 2, 6, 1])\n", + " array([5, 4, 3, 4, 5, 3, 4, 3, 4, 3, 4, 5, 3, 4])\n", + " array([6, 2, 7, 7, 6, 7, 2, 6, 7, 7, 6, 7])\n", + " array([6, 3, 3, 5, 5, 1, 1, 3, 5, 6, 3, 5, 6, 3, 5, 6, 3])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([4, 1, 4, 7, 7, 4, 1, 7, 4, 7, 4, 1, 4, 1, 7, 7])\n", + " array([7, 4, 5, 7, 1, 1, 7, 1, 7, 5, 5, 7, 2, 7, 7, 7])\n", + " array([6, 2, 2, 1, 2, 1, 2, 6, 6, 2, 6, 2, 1, 1, 1])\n", + " array([5, 3, 3, 4, 3, 4, 4, 3, 4, 5, 3, 4, 4, 3])\n", + " array([6, 2, 7, 7, 2, 7, 7, 6, 2, 2, 7, 7])\n", + " array([1, 3, 1, 6, 3, 3, 5, 3, 5, 5, 6, 3, 5, 3, 5, 5, 3])\n", + " array([2, 3, 4, 3, 4, 2, 3, 2, 3, 4, 4, 2, 3, 3, 4, 3, 3, 4, 3, 2, 3, 2])\n", + " array([4, 1, 4, 7, 7, 4, 1, 4, 7, 7, 4, 7, 4, 1, 7])\n", + " array([7, 5, 7, 1, 1, 7, 5, 1, 7, 7, 7, 1, 1, 1, 7, 5])\n", + " array([6, 2, 2, 1, 1, 1, 1, 1, 1, 2, 6, 2, 2, 2])\n", + " array([5, 3, 3, 4, 3, 5, 3, 5, 3, 3, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([6, 3, 3, 5, 
3, 6, 6, 3, 3, 3, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 4, 2, 2, 2, 3, 2, 3, 4, 4, 4, 4, 4])\n", + " array([4, 1, 4, 7, 7, 7, 7, 7, 7, 4, 1, 4, 4, 1, 4, 1])\n", + " array([7, 5, 7, 1, 1, 7, 5, 1, 7, 7, 5, 1, 1, 1, 7])\n", + " array([6, 2, 2, 1, 1, 2, 2, 1, 1, 2, 1, 2, 6, 1])\n", + " array([5, 3, 3, 4, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([6, 3, 3, 5, 5, 3, 6, 3, 5, 5, 6, 3, 5, 5, 3, 5, 5])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 6, 6, 2, 2])\n", + " array([5, 3, 4, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4])\n", + " array([5, 7, 1, 7, 1, 3, 1, 1, 3, 1, 1, 1, 1, 7, 7, 7, 7, 5, 5])\n", + " array([1, 7, 4, 4, 1, 1, 7, 7, 7, 7, 7, 4, 4, 4, 1, 4])\n", + " array([6, 2, 1, 2, 2, 2, 2, 2, 6, 6])\n", + " array([6, 3, 5, 5, 6, 6, 6, 6, 6, 3, 3, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 4, 2, 2, 2, 2, 3, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7])\n", + " array([3, 5, 4, 3, 3, 3, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 7, 7, 7, 7, 5, 5, 5, 5])\n", + " array([1, 4, 7, 4, 7, 7, 7, 7, 7, 1, 1])\n", + " array([6, 2, 2, 1, 2, 2, 2, 2, 6, 6, 1, 1, 1, 1, 1])\n", + " array([6, 3, 3, 5, 3, 6, 6, 3, 3, 3, 6])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([6, 7, 2, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([3, 5, 3, 4, 3, 3, 3, 3, 5, 5, 4, 4, 4, 4, 4])\n", + " array([7, 5, 7, 1, 7, 5, 7, 7, 7, 5, 1, 1, 1, 1, 1])\n", + " array([1, 4, 4, 7, 4, 4, 4, 4, 7, 7, 7, 7, 7, 1, 1, 1])\n", + " array([2, 6, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 6, 6])\n", + " array([3, 6, 3, 5, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5, 6])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 3, 2, 4, 4, 4, 4, 4])\n", + " array([2, 2, 6, 2, 7, 6, 2, 2, 6, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([5, 3, 3, 4, 3, 3, 5, 5, 3, 3, 5, 4, 4, 4, 4, 4])\n", + " array([7, 5, 1, 7, 2, 2, 3, 7, 7, 7, 7, 5, 1, 1, 1, 1, 1])\n", + " array([1, 4, 4, 7, 1, 4, 7, 7, 7, 7, 7, 4, 1, 1, 4, 4])\n", + " array([6, 2, 2, 1, 2, 2, 2, 2, 6, 6, 
1, 1, 1, 1, 1])\n", + " array([3, 6, 5, 3, 3, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 3, 2, 3, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7])\n", + " array([5, 3, 3, 4, 3, 3, 3, 3, 5, 5, 4, 4, 4, 4, 4])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 5, 5, 7, 4, 7, 7, 7])\n", + " array([4, 1, 4, 7, 7, 7, 7, 7, 7, 4, 1, 4, 1, 4, 4])\n", + " array([6, 2, 2, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 6, 6])\n", + " array([6, 3, 3, 5, 3, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 3, 2, 4, 4, 4, 4, 4])\n", + " array([3, 5, 4, 3, 3, 5, 5, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4, 4])\n", + " array([2, 3, 2, 2, 3, 2, 3, 2, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([2, 3, 3, 2, 3, 3, 2, 3, 3])\n", + " array([2, 6, 2, 2, 2, 6, 2, 6, 2, 1, 1, 1, 1, 1, 1])\n", + " array([1, 4, 4, 1, 4, 1, 4, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([7, 5, 1, 7, 1, 7, 5, 1, 7, 1, 7, 1, 7, 5, 1])\n", + " array([2, 2, 2, 2, 7, 7, 7, 7, 7, 7, 2, 2, 7, 2, 6, 2, 6, 6, 2, 2, 6])\n", + " array([5, 3, 5, 3, 3, 3, 5, 3, 3, 5, 4, 4, 4, 4, 4, 4])\n", + " array([3, 2, 4, 3, 3, 2, 4, 2, 4, 4, 2, 2, 3, 4, 4, 2, 3, 4])\n", + " array([2, 3, 3, 3, 4, 2, 3, 4, 3, 4, 2, 3, 4])\n", + " array([6, 2, 1, 2, 1, 2, 6, 1, 2, 6, 1, 2, 1, 2, 6, 1])\n", + " array([4, 1, 7, 4, 1, 1, 7, 1, 4, 7, 4, 7, 4, 7, 1, 4, 7])\n", + " array([7, 5, 1, 7, 1, 7, 7, 1, 7, 5, 1, 7, 5, 1, 7, 1])\n", + " array([6, 7, 2, 7, 2, 7, 6, 7, 2, 2, 2, 2, 7, 6])\n", + " array([3, 5, 4, 3, 4, 4, 3, 5, 4, 3, 5, 4, 3, 4, 3, 4])\n", + " array([2, 3, 4, 2, 4, 2, 3, 4, 2, 4, 2, 4, 2, 3, 4])\n", + " array([6, 3, 3, 4, 3, 4, 6, 3, 4, 3, 4, 4, 3])\n", + " array([2, 6, 1, 2, 1, 2, 6, 1, 2, 1, 2, 6, 1, 2, 1, 2])\n", + " array([1, 4, 7, 4, 7, 1, 4, 7, 1, 4, 7, 4, 7, 7, 1, 4])\n", + " array([7, 4, 5, 1, 7, 7, 1, 7, 7, 1, 7, 5, 1, 7, 1, 7, 1])\n", + " array([6, 7, 2, 7, 6, 7, 2, 7, 2, 7, 6, 7])\n", + " array([3, 5, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4])\n", + " array([2, 3, 2, 2, 2, 2, 3, 4, 4, 4, 4, 2, 4, 4])\n", + " array([2, 
3, 3, 2, 3, 2, 3, 2, 3, 3, 4, 4, 2, 6, 2, 6, 2, 6, 2, 6])\n", + " array([2, 6, 1, 2, 1, 2, 1, 2, 2, 6, 1, 1, 1, 1, 2, 1])\n", + " array([4, 1, 7, 1, 7, 1, 7, 1, 4, 7, 1, 4, 7, 1, 7, 7])\n", + " array([7, 5, 1, 7, 1, 7, 5, 1, 7, 1, 7, 5, 1, 7, 5, 1])\n", + " array([6, 7, 2, 7, 6, 7, 6, 7, 6, 7, 2, 7])\n", + " array([3, 5, 3, 3, 3, 5, 3, 3, 4, 4, 4, 4, 4, 4])\n", + " array([2, 3, 2, 2, 2, 3, 2, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 3, 3, 5, 5, 5, 5, 3, 5, 3, 5, 3, 6, 5, 3, 6, 5])\n", + " array([2, 6, 1, 2, 1, 2, 6, 1, 2, 1, 2, 2, 2, 1, 2, 1])\n", + " array([1, 4, 7, 4, 7, 1, 4, 7, 4, 7, 1, 4, 7, 4, 7])\n", + " array([7, 5, 1, 7, 1, 7, 5, 1, 7, 1, 7, 1, 7, 5])\n", + " array([6, 6, 7, 2, 7, 6, 7, 6, 7, 6, 7, 2, 7])\n", + " array([2, 3, 2, 6, 3, 5, 5, 3, 6, 3, 6, 3, 6, 3, 5, 5, 5, 5])\n", + " array([3, 5, 3, 4, 3, 5, 3, 3, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 1, 1, 1, 1, 1, 2, 2, 6, 2, 6, 2])\n", + " array([2, 3, 3, 4, 2, 3, 3, 3, 2, 2, 2, 4, 4, 4, 4, 4, 2, 3, 3, 2, 3, 2])\n", + " array([4, 4, 1, 7, 4, 4, 4, 4, 1, 1, 1, 7, 7, 7, 7, 7])\n", + " array([6, 2, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 7, 7, 7, 5, 7, 5, 1, 1, 1, 1, 1])\n", + " array([6, 3, 3, 5, 3, 3, 6, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([5, 3, 3, 4, 4, 4, 4, 4, 4, 3, 5, 3, 3, 3, 5])\n", + " array([6, 2, 2, 1, 2, 6, 2, 2, 6, 6, 2, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 3, 2, 3, 4, 4, 4, 4, 4])\n", + " array([4, 1, 4, 7, 4, 1, 1, 4, 4, 4, 7, 7, 7, 7, 7])\n", + " array([6, 2, 7, 2, 6, 6, 2, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 7, 7, 7, 5, 7, 5, 1, 1, 1, 1, 1])\n", + " array([2, 3, 3, 5, 2, 2, 3, 6, 3, 3, 2, 6, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([5, 3, 3, 4, 3, 5, 3, 5, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 2, 2, 2, 6, 2, 1, 1, 1, 1, 1])\n", + " array([2, 2, 2, 3, 2, 4, 2, 3, 2, 2, 2, 3, 2, 4, 4, 4, 4, 4])\n", + " array([4, 1, 4, 7, 4, 4, 4, 4, 1, 7, 7, 7, 7, 7, 1])\n", + " array([6, 2, 7, 6, 6, 2, 2, 7, 7, 7, 7, 7])\n", + " 
array([7, 5, 7, 1, 1, 7, 1, 7, 5, 1, 7, 1, 7, 1])\n", + " array([6, 3, 3, 5, 3, 6, 5, 5, 3, 6, 5, 3, 5, 3, 5])\n", + " array([3, 5, 3, 4, 3, 3, 5, 3, 3, 5, 4, 4, 4, 4, 4])\n", + " array([2, 6, 2, 1, 2, 2, 6, 2, 6, 2, 2, 6, 3, 3, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 3, 4, 4, 2, 3, 4, 2, 3, 3, 4, 2, 3, 4])\n", + " array([3, 3, 1, 4, 4, 7, 4, 4, 4, 4, 1, 7, 7, 7, 7, 7])\n", + " array([6, 2, 7, 2, 2, 7, 7, 6, 6, 7, 7, 7, 7, 7])\n", + " array([7, 5, 7, 1, 7, 7, 5, 5, 5, 7, 7, 5, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 5, 3, 5, 5, 3, 6, 5, 3, 5, 3, 6, 5])\n", + " array([5, 3, 3, 3, 4, 4, 3, 5, 4, 3, 4, 3, 5, 4])\n", + " array([6, 2, 2, 1, 2, 2, 6, 2, 6, 2, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 2, 3, 2, 3, 2, 4, 4, 4, 4, 4])\n", + " array([1, 4, 4, 7, 1, 4, 1, 4, 1, 4, 4, 7, 7, 7, 7, 7])\n", + " array([6, 2, 7, 7, 2, 7, 2, 7, 6, 7, 6, 7, 7, 6])\n", + " array([7, 5, 7, 1, 7, 1, 1, 7, 1, 7, 5, 1, 7, 5, 1])\n", + " array([7, 5, 1, 7, 2, 2, 5, 5, 5, 5, 5, 2, 2, 2, 2, 1, 5, 5, 5, 5, 2, 7,\n", + " 2, 7, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 4, 2, 2, 2, 2, 2, 3, 3, 4, 4, 4, 4, 2])\n", + " array([2, 6, 2, 1, 2, 2, 2, 2, 6, 6, 1, 1, 1, 1, 1])\n", + " array([3, 5, 4, 3, 4, 3, 5, 4, 3, 3, 4, 3, 5, 4, 4, 3, 3, 5, 4])\n", + " array([2, 6, 7, 2, 7, 2, 2, 2, 6, 7, 2, 6, 7, 2, 7, 2, 6, 7])\n", + " array([2, 3, 5, 3, 3, 5, 2, 3, 5, 2, 3, 3, 5, 3, 4, 5, 2, 3, 5])\n", + " array([1, 4, 7, 4, 7, 4, 1, 7, 4, 7, 1, 4, 7, 4, 7])\n", + " array([5, 7, 1, 7, 1, 5, 7, 1, 7, 5, 1, 7, 1, 7, 1])\n", + " array([2, 3, 1, 2, 1, 2, 1, 2, 1, 3, 2, 3, 1])\n", + " array([2, 6, 1, 6, 1, 6, 1, 2, 6, 1, 2, 6, 1, 6, 1])\n", + " array([5, 3, 4, 3, 3, 3, 4, 3, 4, 3, 4, 4, 3, 5, 4, 3, 5, 4])\n", + " array([6, 7, 2, 7, 2, 7, 6, 7, 6, 7, 2, 7])\n", + " array([2, 3, 5, 3, 5, 3, 5, 3, 5, 6, 3, 2, 6, 5, 5, 3])\n", + " array([1, 4, 7, 4, 7, 4, 1, 7, 1, 4, 7, 4, 7, 4, 7])\n", + " array([5, 7, 1, 7, 1, 5, 7, 1, 7, 5, 1, 7, 1, 1, 5, 7])\n", + " array([3, 2, 4, 2, 4, 2, 4, 2, 4, 2, 4, 2, 3, 4])\n", + " 
array([2, 6, 1, 2, 1, 2, 6, 1, 2, 6, 1, 2, 6, 1, 2, 1])\n", + " array([3, 5, 4, 5, 4, 5, 4, 5, 4, 5, 3, 4, 4, 5])\n", + " array([7, 2, 6, 7, 7, 2, 2, 6, 7, 2, 7, 2, 7, 6, 7])\n", + " array([6, 3, 5, 3, 5, 6, 5, 5, 3, 5, 3, 5, 5, 5, 3, 3, 5, 3, 5])\n", + " array([1, 4, 7, 4, 7, 4, 1, 7, 4, 1, 7, 4, 7, 4, 1, 7])\n", + " array([5, 7, 1, 7, 1, 7, 1, 5, 7, 1, 5, 7, 1, 7, 1])\n", + " array([3, 2, 4, 2, 4, 4, 2, 4, 2, 3, 4, 2, 3, 4, 4, 2, 3])\n", + " array([2, 6, 1, 2, 1, 2, 6, 1, 2, 6, 1, 2, 1, 2, 1])\n", + " array([5, 3, 5, 5, 4, 3, 4, 3, 4, 3, 5, 4, 3, 5, 4, 4, 3, 5])\n", + " array([3, 6, 3, 7, 2, 7, 6, 7, 2, 7, 6, 2, 6, 7])\n", + " array([6, 3, 5, 3, 5, 6, 3, 5, 5, 3, 5, 6, 3, 5, 5, 3, 6])\n", + " array([1, 4, 7, 4, 7, 4, 1, 7, 4, 1, 7, 4, 7, 4, 7])\n", + " array([5, 7, 1, 7, 1, 7, 1, 7, 7, 1, 5, 7, 1, 5, 7, 1])\n", + " array([3, 2, 4, 2, 4, 3, 2, 4, 2, 4, 3, 2, 4, 2, 4])\n", + " array([2, 6, 1, 2, 1, 2, 6, 1, 2, 6, 1, 2, 1, 2, 6, 1])\n", + " array([5, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 5, 4])\n", + " array([6, 7, 2, 7, 2, 6, 2, 6, 2, 2, 5, 7, 6, 7, 2, 7, 2, 7])\n", + " array([6, 3, 5, 3, 5, 3, 6, 5, 5, 3, 5, 3, 6, 5, 6, 3, 5])\n", + " array([1, 4, 7, 4, 7, 4, 7, 4, 7, 4, 7, 1, 4, 7])\n", + " array([7, 5, 5, 7, 5, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1])\n", + " array([5, 5, 5, 3, 3, 3, 3, 3, 3, 3])\n", + " array([7, 7, 7, 7, 7, 7, 7, 6, 6, 6, 6, 2, 2])\n", + " array([2, 3, 2, 3, 2, 3, 2, 2, 2])\n", + " array([1, 1, 1, 4, 1, 4, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 6, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 6, 3, 6, 3, 3, 3, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 5, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 5, 3, 3, 3, 3, 3, 4, 4, 4, 4])\n", + " array([6, 6, 2, 2, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 3, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4, 4])\n", + " array([1, 4, 1, 4, 1, 4, 4, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([6, 2, 6, 2, 6, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1])\n", + " array([6, 3, 6, 3, 6, 3, 3, 3, 3, 5, 5, 5, 5, 5, 
5])\n", + " array([7, 5, 7, 5, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 5, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 6, 6, 6, 2, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 3, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4, 4])\n", + " array([1, 1, 1, 4, 4, 4, 4, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([6, 2, 2, 6, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 6, 3, 3, 3, 3, 5, 5, 5, 5, 5, 5])\n", + " array([5, 7, 5, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 5, 3, 5, 3, 3, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 6, 6, 6, 2, 2, 7, 7, 7, 7, 7, 7, 7])\n", + " array([2, 3, 3, 2, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4])\n", + " array([1, 1, 1, 4, 4, 4, 4, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([2, 2, 2, 2, 2, 2, 6, 6, 6, 6, 1, 1, 1, 1, 1, 1])\n", + " array([3, 3, 3, 3, 3, 3, 6, 6, 7, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 5, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 3, 5, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4, 4])\n", + " array([6, 6, 6, 6, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 2, 3, 2, 2, 3, 2, 4, 4, 4, 4, 4, 4])\n", + " array([4, 4, 4, 4, 4, 4, 1, 1, 7, 7, 7, 7, 7, 7])\n", + " array([2, 2, 2, 2, 2, 2, 6, 6, 6, 6, 1, 1, 1, 1, 1, 1])\n", + " array([5, 5, 5, 5, 5, 5, 3, 3, 3, 3, 3, 3, 6, 6, 6, 6])\n", + " array([7, 5, 7, 1, 1, 7, 7, 7, 7, 5, 1, 1, 1, 1])\n", + " array([2, 6, 1, 2, 1, 2, 1, 2, 6, 2, 6, 1, 1, 1, 2])\n", + " array([6, 2, 7, 7, 2, 6, 2, 2, 7, 7, 7, 7, 2, 6])\n", + " array([5, 3, 3, 4, 4, 3, 4, 3, 3, 4, 3, 5, 4, 3, 5, 4])\n", + " array([2, 3, 2, 4, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 3, 6, 3, 6, 3, 6, 5, 5, 5, 5, 5])\n", + " array([4, 1, 4, 7, 7, 4, 7, 4, 7, 1, 4, 7, 4, 6, 6, 7, 6])\n", + " array([7, 5, 7, 1, 1, 7, 5, 1, 7, 1, 7, 5, 1, 7, 1, 5])\n", + " array([2, 6, 2, 1, 1, 2, 1, 2, 1, 2, 6, 1, 2, 6, 1])\n", + " array([6, 2, 7, 7, 6, 7, 2, 7, 6, 7, 2, 7])\n", + " array([5, 3, 3, 4, 4, 3, 5, 3, 3, 5, 3, 4, 4, 4, 4])\n", + " array([2, 3, 1, 1, 2, 4, 4, 2, 4, 2, 3, 2, 3, 4, 4, 2, 4])\n", 
+ " array([3, 6, 3, 5, 5, 3, 6, 5, 3, 5, 6, 3, 5, 3, 5])\n", + " array([1, 4, 4, 7, 7, 1, 4, 5, 5, 7, 4, 7, 4, 7, 1, 4, 7])\n", + " array([7, 5, 7, 1, 7, 5, 1, 7, 5, 1, 7, 5, 1, 7, 1, 7, 1])\n", + " array([2, 6, 2, 1, 1, 2, 6, 2, 1, 1, 2, 1, 2, 1])\n", + " array([6, 2, 7, 7, 6, 2, 7, 6, 7, 2, 7, 6, 7])\n", + " array([5, 3, 3, 4, 4, 3, 4, 3, 5, 3, 5, 4, 4, 3, 4])\n", + " array([2, 3, 2, 4, 4, 2, 3, 4, 2, 4, 2, 3, 4, 2, 3, 3, 4])\n", + " array([6, 3, 3, 5, 5, 3, 3, 6, 3, 3, 5, 5, 5, 5])\n", + " array([4, 1, 4, 7, 7, 4, 7, 4, 7, 4, 7, 1, 4, 7])\n", + " array([7, 5, 7, 1, 7, 5, 1, 7, 1, 5, 7, 1, 7, 1, 1, 5, 1, 7, 1])\n", + " array([2, 6, 2, 1, 1, 2, 1, 2, 1, 2, 6, 1, 2, 1])\n", + " array([6, 2, 7, 7, 6, 7, 2, 7, 6, 7, 2, 7])\n", + " array([5, 3, 3, 4, 5, 3, 4, 5, 3, 4, 3, 4, 3, 4, 3, 5, 4])\n", + " array([2, 3, 2, 4, 4, 2, 3, 4, 2, 4, 2, 3, 4, 2, 4])\n", + " array([3, 6, 3, 5, 5, 3, 5, 3, 6, 5, 3, 6, 5, 3, 3, 5])\n", + " array([4, 1, 4, 7, 4, 7, 7, 4, 7, 4, 1, 1, 4, 7, 7])\n", + " array([7, 5, 7, 1, 1, 7, 1, 7, 5, 5, 1, 7, 1, 7, 5, 1])\n", + " array([2, 6, 2, 1, 1, 1, 1, 1, 1, 2, 6, 2, 2, 2, 6])\n", + " array([6, 2, 7, 7, 6, 7, 6, 7, 2, 7, 2, 7])\n", + " array([3, 5, 3, 4, 4, 5, 3, 4, 3, 4, 5, 3, 4, 3, 4])\n", + " array([2, 3, 2, 4, 4, 2, 4, 2, 4, 2, 3, 4, 2, 4])\n", + " array([6, 3, 3, 5, 5, 3, 6, 3, 5, 5, 3, 6, 5, 3, 6, 5])\n", + " array([1, 4, 4, 7, 7, 4, 1, 7, 4, 4, 4, 7, 7, 7, 1])\n", + " array([2, 3, 2, 2, 3, 2, 3, 2, 2])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 1, 7, 7, 7, 5, 7, 5])\n", + " array([2, 6, 1, 2, 1, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 3, 3, 3, 5, 5, 3, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 6, 3, 6, 3, 3, 5, 5, 5, 5, 5])\n", + " array([6, 2, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 7, 7, 7, 7, 7, 7, 1, 4, 4, 4])\n", + " array([2, 3, 2, 4, 4, 4, 4, 4, 4, 2, 3, 2, 3, 2, 2])\n", + " array([1, 7, 5, 7, 7, 5, 7, 5, 7, 5, 7, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " 
array([4, 3, 5, 3, 4, 3, 5, 5, 3, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([6, 3, 3, 5, 3, 3, 3, 6, 3, 6, 5, 5, 5, 5, 5])\n", + " array([6, 2, 7, 2, 2, 2, 6, 7, 7, 7, 7, 7])\n", + " array([4, 4, 7, 3, 7, 3, 1, 1, 4, 4, 1, 4, 4, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 2, 2, 3, 3, 2, 3, 4, 4, 4, 4, 4])\n", + " array([7, 5, 7, 1, 7, 5, 7, 7, 5, 7, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 6, 2, 1, 1, 1, 1, 1])\n", + " array([3, 3, 5, 4, 3, 5, 3, 5, 3, 3, 4, 4, 4, 4, 4])\n", + " array([5, 6, 3, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5, 6, 6, 6])\n", + " array([6, 2, 7, 6, 6, 2, 2, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 7, 4, 4, 4, 4, 1, 1, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 3, 2, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([1, 7, 5, 7, 7, 7, 7, 7, 5, 1, 1, 1, 1, 1])\n", + " array([2, 6, 2, 1, 2, 6, 2, 6, 2, 2, 6, 1, 1, 1, 1, 1])\n", + " array([4, 3, 3, 5, 3, 3, 5, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 5, 3, 3, 3, 3, 6, 6, 5, 5, 5, 5, 5])\n", + " array([6, 2, 7, 6, 6, 6, 2, 7, 7, 7, 7, 7])\n", + " array([4, 4, 1, 7, 4, 4, 4, 4, 1, 1, 7, 7, 7, 7, 7])\n", + " array([4, 2, 3, 2, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([1, 7, 5, 7, 1, 1, 1, 1, 1, 7, 7, 7, 7, 5])\n", + " array([2, 6, 2, 1, 2, 2, 2, 2, 6, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 4, 3, 5, 3, 5, 5, 3, 3, 4, 4, 4, 4, 4])\n", + " array([2, 3, 3, 5, 2, 3, 2, 6, 6, 3, 2, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([6, 2, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 7, 4, 4, 4, 4, 1, 7, 7, 7, 7, 7, 4])\n", + " array([4, 4, 4, 4, 4, 4, 7, 7, 7, 7, 7, 7, 1, 1])\n", + " array([2, 6, 2, 2, 2, 6, 2, 6, 2, 6, 1, 1, 1, 1, 1, 1])\n", + " array([4, 4, 4, 4, 4, 4, 4, 5, 2, 5, 5, 5, 5, 5, 2, 2, 2])\n", + " array([6, 3, 3, 6, 3, 3, 3, 3, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 7, 7, 7, 7, 5, 1, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 3, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4, 4])\n", + " array([6, 6, 6, 6, 2, 2, 4, 4, 4, 4, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([4, 4, 4, 4, 4, 4, 1, 1, 1, 7, 
7, 7, 7, 7, 7])\n", + " array([6, 2, 2, 6, 2, 2, 6, 2, 2, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 4, 5, 5, 3, 3, 3, 5, 5, 5, 4, 4, 4, 4, 4, 4])\n", + " array([6, 3, 3, 3, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 5, 7, 5, 7, 7, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 2, 2, 3, 2, 2, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 6, 6, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 4, 4, 4, 4, 1, 7, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 2, 6, 2, 2, 6, 2, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 3, 3, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4, 4])\n", + " array([6, 3, 3, 3, 6, 3, 3, 3, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 7, 5, 7, 7, 5, 7, 1, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 3, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 2, 6, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([1, 4, 4, 1, 4, 4, 4, 1, 4, 7, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 2, 2, 2, 6, 2, 6, 1, 1, 1, 1, 1, 1, 1])\n", + " array([5, 3, 3, 3, 5, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4])\n", + " array([6, 3, 3, 3, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 7, 7, 7, 5, 7, 5, 1, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 2, 3, 2, 2, 3, 2, 4, 4, 4, 4, 4, 4])\n", + " array([6, 7, 2, 7, 2, 2, 6, 6, 6, 7, 6, 7, 7, 7])\n", + " array([1, 4, 4, 4, 4, 1, 1, 1, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 2, 2, 6, 2, 2, 1, 1, 1, 1, 1, 1])\n", + " array([3, 5, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 6, 3, 3, 6, 3, 3, 5, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 7, 5, 7, 5, 7, 7, 5, 1, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 2, 3, 2, 2, 3, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 6, 6, 2, 2, 7, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([7, 6, 2, 6, 2, 2, 2, 6, 2, 7, 7, 7, 7, 7])\n", + " array([4, 4, 1, 7, 4, 4, 4, 4, 1, 7, 7, 7, 7, 7])\n", + " array([1, 7, 5, 7, 5, 7, 1, 1, 1, 1, 1, 5, 7, 7, 7])\n", + " array([4, 3, 3, 5, 3, 4, 4, 4, 3, 3, 3, 5, 5, 4, 4, 4])\n", + " array([6, 3, 3, 5, 2, 3, 2, 3, 3, 3, 
5, 5, 5, 5, 5])\n", + " array([2, 2, 6, 1, 1, 1, 1, 1, 1, 2, 6, 2, 6, 2, 2])\n", + " array([2, 2, 3, 4, 2, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([2, 6, 7, 6, 2, 6, 2, 7, 7, 7, 7, 7, 7])\n", + " array([4, 1, 4, 4, 4, 7, 4, 1, 4, 1, 4, 4, 1, 7, 7, 7, 7, 7])\n", + " array([1, 7, 5, 7, 7, 7, 5, 5, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 4, 5, 3, 3, 5, 4, 4, 4, 4, 4, 3, 3, 3, 5])\n", + " array([3, 3, 6, 5, 3, 3, 6, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([1, 2, 2, 6, 2, 2, 6, 2, 2, 6, 1, 1, 1, 1, 1])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 3, 2, 4, 4, 4, 4, 4])\n", + " array([2, 6, 7, 2, 2, 6, 6, 7, 7, 7, 7, 7])\n", + " array([4, 4, 7, 1, 4, 1, 4, 4, 1, 1, 4, 7, 7, 7, 7, 7])\n", + " array([1, 7, 7, 5, 7, 5, 7, 7, 7, 1, 1, 1, 1, 1, 5])\n", + " array([3, 3, 5, 4, 3, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4])\n", + " array([5, 6, 3, 3, 3, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([2, 1, 2, 6, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([2, 4, 2, 3, 2, 2, 2, 3, 2, 3, 4, 4, 4, 4, 4])\n", + " array([2, 6, 7, 2, 2, 6, 2, 7, 7, 7, 7, 7])\n", + " array([4, 4, 1, 1, 7, 4, 1, 4, 1, 4, 4, 7, 7, 7, 7, 7])\n", + " array([7, 7, 5, 1, 7, 5, 7, 7, 7, 1, 1, 1, 1, 1])\n", + " array([3, 5, 3, 4, 3, 5, 3, 3, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([3, 3, 6, 5, 3, 6, 3, 6, 3, 3, 5, 5, 5, 5, 5])\n", + " array([2, 2, 6, 1, 2, 2, 2, 2, 1, 2, 2, 6, 1, 1, 1, 1])\n", + " array([2, 4, 2, 3, 2, 3, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([2, 6, 7, 2, 6, 6, 6, 7, 7, 7, 7, 7])\n", + " array([4, 4, 1, 7, 4, 4, 1, 4, 1, 4, 7, 7, 7, 7, 7])\n", + " array([1, 7, 7, 5, 7, 5, 7, 5, 7, 5, 7, 1, 1, 1, 1, 1])\n", + " array([4, 5, 3, 3, 3, 5, 3, 3, 5, 3, 5, 4, 4, 4, 4, 4])\n", + " array([5, 6, 3, 3, 3, 6, 6, 3, 3, 6, 3, 5, 5, 5, 5, 5])\n", + " array([2, 1, 2, 6, 2, 2, 6, 2, 2, 6, 1, 1, 1, 1, 1])\n", + " array([1, 2, 3, 1, 2, 2, 2, 3, 2, 3, 2, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 2, 2, 6, 6, 7, 7, 7, 7, 7, 7])\n", + " array([1, 1, 1, 1, 7, 7, 7, 7, 7, 7, 4, 4, 4, 4, 4, 4])\n", + " array([3, 3, 3, 3, 3, 3, 5, 5, 
5, 4, 4, 4, 4, 4, 4, 4, 4, 4])\n", + " array([5, 2, 2, 6, 3, 3, 3, 3, 6, 6, 3, 3, 5, 5, 5, 5, 5, 5])\n", + " array([2, 6, 2, 2, 2, 2, 2, 6, 1, 1, 1, 1, 1, 1])\n", + " array([7, 5, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1, 5, 5])\n", + " array([2, 3, 2, 2, 3, 2, 2, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 6, 6, 2, 2])\n", + " array([1, 1, 1, 7, 7, 7, 7, 7, 7, 4, 4, 4, 4, 4, 4])\n", + " array([5, 3, 3, 3, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4, 4, 5, 5, 4, 5, 5,\n", + " 4])\n", + " array([3, 6, 3, 5, 5, 5, 5, 5, 5, 6, 3, 3, 6, 3, 3])\n", + " array([2, 6, 2, 1, 2, 2, 6, 2, 6, 2])\n", + " array([1, 7, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 5, 5, 5, 5, 5, 5, 5])\n", + " array([2, 3, 2, 2, 3, 2, 3, 2, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 2, 2, 6, 6, 7, 7, 7, 7, 7, 7])\n", + " array([1, 7, 1, 7, 7, 7, 7, 7, 4, 4, 4, 4, 4, 4])\n", + " array([3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 5, 5, 5])\n", + " array([3, 6, 3, 3, 3, 6, 3, 6, 3, 6, 5, 5, 5, 5, 5, 5, 5, 5, 5])\n", + " array([6, 2, 2, 1, 2, 2, 2, 2, 6, 2, 6, 1, 1, 1, 1, 1])\n", + " array([7, 1, 1, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 5, 5, 5])\n", + " array([2, 3, 2, 2, 2, 2, 3, 2, 3, 4, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 6, 2, 2, 2, 7, 7, 7, 7, 7])\n", + " array([1, 7, 1, 1, 1, 7, 7, 7, 7, 7, 4, 4, 4, 4, 4, 4, 4, 4])\n", + " array([3, 3, 3, 3, 3, 3, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4])\n", + " array([3, 6, 3, 3, 6, 3, 3, 6, 3, 6, 3, 6, 5, 5, 5, 5, 5, 5, 5])\n", + " array([2, 6, 2, 2, 2, 2, 6, 2, 6, 1, 1, 1, 1, 1, 1])\n", + " array([7, 1, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 5, 5])\n", + " array([2, 3, 2, 2, 3, 2, 2, 3, 2, 3, 2, 4, 4, 4, 4, 4, 4])\n", + " array([3, 3, 6, 2, 2, 2, 6, 6, 6, 7, 7, 7, 7, 7, 7])\n", + " array([1, 7, 7, 7, 7, 7, 7, 1, 1, 4, 4, 4, 4, 4, 4])\n", + " array([3, 3, 3, 3, 3, 3, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 5, 4])\n", + " array([3, 6, 3, 3, 3, 3, 3, 6, 5, 5, 5, 5, 5, 5, 5])\n", + " array([2, 6, 2, 2, 6, 2, 2, 2, 2, 6, 1, 1, 1, 1, 1, 1])\n", + " array([7, 7, 1, 7, 7, 7, 7, 1, 1, 1, 1, 1, 5, 5])\n", + " 
array([5, 3, 6, 3, 5, 3, 5, 3, 5, 6, 3, 5, 3, 6, 5])\n", + " array([2, 1, 2, 6, 1, 2, 1, 2, 6, 1, 2, 6, 2, 2, 2, 6, 6, 6, 2, 6, 1, 2,\n", + " 1])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 7, 5, 7, 5, 7, 7, 5])\n", + " array([2, 2, 3, 3, 3, 2, 2, 3, 4, 4, 4, 4, 4, 2, 2])\n", + " array([3, 3, 5, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 2, 2, 5, 5])\n", + " array([1, 4, 7, 5, 5, 7, 4, 1, 4, 4, 1, 4, 4, 7, 7, 7, 7, 7, 7])\n", + " array([2, 2, 6, 7, 7, 7, 7, 7, 7, 6, 6, 6, 2, 2, 6])\n", + " array([2, 3, 6, 2, 6, 5, 5, 5, 5, 5, 5, 6, 3, 3, 6, 3, 6, 6])\n", + " array([2, 6, 2, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 6, 6])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 7, 5, 7, 5, 7, 7])\n", + " array([2, 2, 3, 4, 4, 4, 4, 4, 4, 2, 2, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3])\n", + " array([5, 3, 3, 4, 4, 4, 4, 4, 4, 3, 5, 3, 5, 3, 3])\n", + " array([1, 4, 4, 7, 7, 4, 1, 7, 4, 7, 4, 1, 7, 4, 7])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 6, 2, 6, 2])\n", + " array([2, 3, 2, 5, 5, 5, 5, 5, 5, 2, 3, 2, 3, 2, 2])\n", + " array([2, 2, 6, 1, 2, 2, 2, 2, 6, 2, 6, 2, 2, 1, 1, 1, 1, 1])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 7, 7, 7, 7, 5])\n", + " array([2, 3, 4, 2, 4, 4, 4, 4, 4, 2, 3, 2, 2, 2])\n", + " array([3, 5, 3, 4, 4, 4, 4, 4, 4, 3, 5, 3, 5, 3, 5, 3])\n", + " array([4, 1, 4, 7, 7, 7, 7, 7, 7, 4, 4, 1, 4, 1, 1, 1, 4])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 2, 2, 6, 6])\n", + " array([6, 3, 3, 3, 5, 3, 5, 5, 5, 5, 5, 3, 6, 3, 3, 3])\n", + " array([6, 2, 2, 1, 1, 1, 1, 1, 1, 2, 6, 2, 2, 2])\n", + " array([7, 5, 7, 1, 7, 7, 1, 1, 1, 1, 1, 7, 7, 7, 5, 7, 5])\n", + " array([2, 3, 2, 4, 4, 4, 4, 4, 4, 2, 3, 2, 2, 2, 2])\n", + " array([5, 3, 3, 4, 4, 4, 4, 4, 4, 3, 5, 3, 5, 3, 5, 3])\n", + " array([4, 1, 4, 7, 7, 7, 7, 7, 7, 4, 1, 1, 4, 4, 4])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 2, 6, 6, 2])\n", + " array([6, 3, 3, 5, 5, 5, 5, 5, 5, 3, 3, 3, 3, 6])\n", + " array([6, 2, 2, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 6, 6])\n", + " array([7, 5, 7, 1, 1, 1, 1, 1, 1, 7, 5, 7, 5, 7, 7, 5])\n", + " array([2, 3, 2, 4, 4, 4, 
4, 4, 4, 2, 3, 2, 3, 2, 2])\n", + " array([5, 3, 3, 4, 4, 4, 4, 4, 4, 3, 5, 3, 5, 3, 5, 3])\n", + " array([4, 1, 4, 7, 7, 7, 7, 7, 7, 4, 1, 4, 1, 1, 4, 4])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 2, 2, 6, 6])]\n", + "y_train\n", + " [5. 3. 6. 1. 0. 4. 2. 5. 3. 6. 1. 0. 4. 2. 5. 3. 6. 1. 0. 4. 2. 5. 3. 6.\n", + " 1. 0. 4. 2. 5. 3. 6. 1. 0. 4. 2. 4. 5. 3. 2. 0. 1. 6. 4. 5. 3. 2. 0. 1.\n", + " 6. 4. 5. 3. 2. 0. 1. 6. 4. 5. 3. 2. 0. 1. 6. 4. 5. 3. 2. 0. 1. 6. 0. 1.\n", + " 2. 3. 4. 5. 6. 0. 1. 2. 3. 4. 5. 6. 0. 1. 2. 3. 4. 5. 6. 0. 1. 2. 3. 4.\n", + " 5. 6. 0. 1. 2. 3. 4. 5. 6. 2. 3. 6. 4. 0. 5. 1. 2. 3. 6. 4. 0. 5. 1. 2.\n", + " 3. 6. 4. 0. 5. 1. 2. 3. 6. 4. 0. 5. 1. 2. 3. 6. 4. 0. 5. 1. 5. 0. 6. 3.\n", + " 4. 1. 2. 5. 0. 6. 3. 4. 1. 2. 5. 0. 6. 3. 4. 1. 2. 5. 0. 6. 3. 4. 1. 2.\n", + " 5. 0. 6. 3. 4. 1. 2. 0. 2. 1. 4. 3. 6. 5. 0. 2. 1. 4. 3. 6. 5. 0. 2. 1.\n", + " 4. 3. 6. 5. 0. 2. 1. 4. 3. 6. 5. 0. 2. 1. 4. 3. 6. 5. 1. 0. 4. 2. 3. 5.\n", + " 6. 1. 0. 4. 2. 3. 5. 6. 1. 0. 4. 2. 3. 5. 6. 1. 0. 4. 2. 3. 5. 6. 1. 0.\n", + " 4. 2. 3. 5. 6. 6. 2. 4. 0. 5. 1. 3. 6. 2. 4. 0. 5. 1. 3. 6. 2. 4. 0. 5.\n", + " 1. 3. 6. 2. 4. 0. 5. 1. 3. 6. 2. 4. 0. 5. 1. 3. 6. 0. 5. 2. 3. 4. 1. 6.\n", + " 0. 5. 2. 3. 4. 1. 6. 0. 5. 2. 3. 4. 1. 6. 0. 5. 2. 3. 4. 1. 6. 0. 5. 2.\n", + " 3. 4. 1. 6. 4. 5. 0. 2. 1. 3. 6. 4. 5. 0. 2. 1. 3. 6. 4. 5. 0. 2. 1. 3.\n", + " 6. 4. 5. 0. 2. 1. 3. 6. 4. 5. 0. 2. 1. 3. 2. 6. 4. 0. 1. 5. 3. 2. 6. 4.\n", + " 0. 1. 5. 3. 2. 6. 4. 0. 1. 5. 3. 2. 6. 4. 0. 1. 5. 3. 2. 6. 4. 0. 1. 5.\n", + " 3. 3. 4. 0. 1. 6. 2. 5. 3. 4. 0. 1. 6. 2. 5. 3. 4. 0. 1. 6. 2. 5. 3. 4.\n", + " 0. 1. 6. 2. 5. 3. 4. 0. 1. 6. 2. 5. 2. 5. 3. 6. 0. 1. 4. 2. 5. 3. 6. 0.\n", + " 1. 4. 2. 5. 3. 6. 0. 1. 4. 2. 5. 3. 6. 0. 1. 4. 2. 5. 3. 6. 0. 1. 4. 2.\n", + " 5. 3. 0. 1. 4. 6. 2. 5. 3. 0. 1. 4. 6. 2. 5. 3. 0. 1. 4. 6. 2. 5. 3. 0.\n", + " 1. 4. 6. 2. 5. 3. 0. 1. 4. 6. 1. 4. 6. 2. 0. 3. 5. 1. 4. 6. 2. 0. 3. 5.\n", + " 1. 4. 6. 2. 0. 3. 5. 1. 4. 6. 2. 0. 3. 5. 1. 4. 6. 2. 0. 3. 
5.]\n", + "x_test\n", + " [array([4, 1, 1, 4, 4, 7, 7, 7, 7, 7, 7, 4, 1, 4, 4, 4])\n", + " array([2, 3, 2, 4, 4, 4, 4, 4, 4, 2, 2, 2, 3, 2, 3])\n", + " array([5, 3, 3, 4, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5])\n", + " array([6, 2, 7, 6, 6, 2, 2, 7, 7, 7, 7])\n", + " array([2, 6, 2, 1, 2, 2, 2, 2, 6, 6, 1, 1, 1, 1, 1])\n", + " array([3, 3, 6, 5, 3, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([7, 7, 5, 1, 7, 7, 5, 7, 7, 1, 1, 1, 1, 1, 5, 5])\n", + " array([1, 4, 7, 4, 4, 4, 4, 4, 1, 1, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 2, 4, 3, 2, 2, 3, 2, 2, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 4, 3, 3, 3, 3, 5, 5, 4, 4, 4, 4])\n", + " array([6, 2, 7, 2, 2, 7, 7, 7, 6, 6, 7, 6, 6, 7, 7, 7, 7, 7])\n", + " array([2, 2, 6, 1, 1, 2, 2, 2, 2, 6, 6, 6, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 2, 2, 3, 6, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([1, 5, 7, 7, 7, 5, 7, 5, 7, 7, 1, 1, 1, 1, 1])\n", + " array([7, 4, 4, 7, 4, 4, 4, 4, 7, 1, 7, 7, 7, 7, 1, 1, 7])\n", + " array([2, 3, 2, 4, 2, 3, 2, 2, 2, 4, 4, 4, 4, 4])\n", + " array([3, 5, 3, 4, 3, 3, 3, 3, 5, 5, 5, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 7, 7, 7, 7, 7, 2, 2, 6, 6])\n", + " array([2, 2, 1, 6, 2, 2, 2, 2, 6, 6, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 3, 3, 3, 3, 6, 6, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 7, 7, 7, 7, 7, 5, 1, 1, 1, 1, 1, 1])\n", + " array([4, 1, 4, 4, 4, 4, 4, 1, 1, 1, 7, 7, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 2, 2, 2, 2, 3, 4, 4, 4, 4, 4])\n", + " array([5, 3, 3, 4, 5, 3, 3, 3, 3, 5, 3, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 6, 6, 2, 2, 7, 7, 7, 7, 7])\n", + " array([2, 6, 2, 1, 2, 2, 2, 2, 1, 1, 1, 1, 1, 6, 6])\n", + " array([3, 6, 3, 5, 3, 3, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([5, 7, 7, 1, 7, 7, 5, 7, 5, 7, 1, 1, 1, 1, 1])\n", + " array([1, 4, 4, 7, 7, 4, 4, 4, 4, 1, 1, 7, 7, 7, 7])\n", + " array([2, 3, 2, 4, 4, 4, 4, 4, 4, 2, 3, 2, 2, 2])\n", + " array([4, 3, 5, 3, 3, 5, 3, 3, 3, 5, 4, 4, 4, 4, 4])\n", + " array([6, 2, 7, 6, 6, 6, 2, 7, 7, 7, 7, 7])\n", + " array([2, 2, 6, 1, 2, 6, 2, 6, 
2, 2, 1, 1, 1, 1, 1])\n", + " array([3, 6, 3, 5, 3, 6, 3, 6, 3, 3, 6, 5, 5, 5, 5, 5])\n", + " array([7, 5, 7, 1, 7, 5, 1, 1, 1, 1, 1, 7, 7, 7, 5])]\n", + "y_test\n", + " [3. 2. 0. 5. 4. 1. 6. 3. 2. 0. 5. 4. 1. 6. 3. 2. 0. 5. 4. 1. 6. 3. 2. 0.\n", + " 5. 4. 1. 6. 3. 2. 0. 5. 4. 1. 6.]\n", + "> \u001b[0;32m/tmp/ipykernel_97850/1264473745.py\u001b[0m(32)\u001b[0;36mcreateTrainTest\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 30 \u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'y_test\\n'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my_test\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 31 \u001b[0;31m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m---> 32 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mshapes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 33 \u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_train\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 34 \u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_train\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ipdb> x_train.shape\n", + "(525,)\n", + "ipdb> x_test.shape\n", + "(35,)\n" + ] + } + ], + "source": [ + "accuracies_full = dict()\n", + "accuracies_small = dict()\n", + "accuracies_last = dict()\n", + "\n", + "for current_PID in sorted(data.PID.unique()):\n", + " accuracies_full[current_PID], pred_label, test_label = runSVMS(createTrainTest([current_PID], Task_IDs, 
StartIndexOffset, EndIndexOffset, shapes=True))\n", + " # Only the first 5\n", + " accuracies_small[current_PID], pred_label, test_label = runSVMS(createTrainTest([current_PID], Task_IDs, StartIndexOffset, EndIndexOffset, shapes=True), 5)\n", + " # Only the last 5\n", + " accuracies_last[current_PID], pred_label, test_label = runSVMS(createTrainTest([current_PID], Task_IDs, StartIndexOffset, EndIndexOffset, shapes=True), 5, last_elements=True)\n", + " #pdb.set_trace()\n", + "print(accuracies_full)\n", + "print(accuracies_small)\n", + "print(accuracies_last)\n", + "print(\"mean full\", np.array(list(accuracies_full.values())).mean())\n", + "print(\"mean small\", np.array(list(accuracies_small.values())).mean())\n", + "print(\"mean last\", np.array(list(accuracies_last.values())).mean())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fdd4c915", + "metadata": {}, + "outputs": [], + "source": [ + "len(g.groups.keys())\n", + "g.groups.keys()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/keyboard_and_mouse/dataset/.ipynb_checkpoints/03-NextActionPrediction-checkpoint.ipynb b/keyboard_and_mouse/dataset/.ipynb_checkpoints/03-NextActionPrediction-checkpoint.ipynb new file mode 100644 index 0000000..4dab2c7 --- /dev/null +++ b/keyboard_and_mouse/dataset/.ipynb_checkpoints/03-NextActionPrediction-checkpoint.ipynb @@ -0,0 +1,687 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "3aed8aec", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-09-27 15:31:30.518074: I 
tensorflow/stream_executor/platform/default/dso_loader.cc:49] Successfully opened dynamic library libcudart.so.10.1\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "import datetime\n", + "import time,pdb\n", + "import json\n", + "import random\n", + "import statistics\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import tensorflow as tf\n", + "from tensorflow import keras\n", + "from sklearn import svm\n", + "from sklearn.model_selection import GridSearchCV \n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.metrics import accuracy_score\n", + "from tensorflow.keras.layers import *\n", + "from sklearn.model_selection import train_test_split\n", + "from tensorflow.keras.models import Sequential\n", + "from tensorflow.keras.optimizers import *\n", + "from tensorflow.keras.callbacks import TensorBoard, ModelCheckpoint, ReduceLROnPlateau, Callback\n", + "from tensorflow.keras.preprocessing.sequence import pad_sequences\n", + "from sklearn.neighbors import KNeighborsClassifier\n", + "from sklearn.metrics import mean_squared_error\n", + "from sklearn.metrics import accuracy_score\n", + "import tqdm\n", + "from multiprocessing import Pool\n", + "import os\n", + "from tensorflow.compat.v1.keras.layers import Bidirectional, CuDNNLSTM" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "817f7108", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "available PIDs [ 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16.]\n", + "available TaskIDs [0. 1. 2. 3. 4. 5. 6.]\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TimestampEventTaskIDPartPIDTextRuleRuleType
01.575388e+1240.01.01.0{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3.0Cmd
11.575388e+1210.01.01.0{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3.0Toolbar
21.575388e+1210.01.01.0{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3.0Cmd
31.575388e+1240.01.01.0{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3.0Cmd
41.575388e+1240.01.01.0{'Title': ['1', 'Indent', 'and', 'Italic'], 'S...3.0Cmd
...........................
83761.603898e+1276.05.016.0{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5.0Toolbar
83771.603898e+1226.05.016.0{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5.0Cmd
83781.603898e+1226.05.016.0{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5.0Cmd
83791.603898e+1266.05.016.0{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5.0Toolbar
83801.603898e+1266.05.016.0{'Title': ['Size', 'Big'], 'Subtitle': ['Bold'...5.0Toolbar
\n", + "

8381 rows × 8 columns

\n", + "
" + ], + "text/plain": [ + " Timestamp Event TaskID Part PID \\\n", + "0 1.575388e+12 4 0.0 1.0 1.0 \n", + "1 1.575388e+12 1 0.0 1.0 1.0 \n", + "2 1.575388e+12 1 0.0 1.0 1.0 \n", + "3 1.575388e+12 4 0.0 1.0 1.0 \n", + "4 1.575388e+12 4 0.0 1.0 1.0 \n", + "... ... ... ... ... ... \n", + "8376 1.603898e+12 7 6.0 5.0 16.0 \n", + "8377 1.603898e+12 2 6.0 5.0 16.0 \n", + "8378 1.603898e+12 2 6.0 5.0 16.0 \n", + "8379 1.603898e+12 6 6.0 5.0 16.0 \n", + "8380 1.603898e+12 6 6.0 5.0 16.0 \n", + "\n", + " TextRule Rule Type \n", + "0 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "1 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "2 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "3 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "4 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "... ... ... ... \n", + "8376 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5.0 Toolbar \n", + "8377 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5.0 Cmd \n", + "8378 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5.0 Cmd \n", + "8379 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 5.0 Toolbar \n", + "8380 {'Title': ['Size', 'Big'], 'Subtitle': ['Bold'... 
5.0 Toolbar \n", + "\n", + "[8381 rows x 8 columns]" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "study_data_path = \"../IntentData/\"\n", + "data = pd.read_pickle(study_data_path + \"/Preprocessing_data/clean_data.pkl\")\n", + "#val_data = pd.read_pickle(study_data_path + \"/Preprocessing_data/clean_data_condition2.pkl\")\n", + "\n", + "print(\"available PIDs\", data.PID.unique())\n", + "\n", + "print(\"available TaskIDs\", data.TaskID.unique())\n", + "\n", + "data.Event.unique()\n", + "data" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "ab778228", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "count 560.000000\n", + "mean 14.966071\n", + "std 2.195440\n", + "min 8.000000\n", + "25% 14.000000\n", + "50% 15.000000\n", + "75% 16.000000\n", + "max 28.000000\n", + "Name: Event, dtype: float64" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data.groupby([\"PID\", \"Part\", \"TaskID\"])[\"Event\"].count().describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "32550f71", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "Task_IDs = list(range(0,7))\n", + "\n", + "# grouping by part is needed to have one ruleset for the whole part\n", + "g = data.groupby([\"PID\", \"Part\", \"TaskID\"])\n", + "df_all = []" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "f6fecc2f", + "metadata": {}, + "outputs": [], + "source": [ + "def createTrainTestalaSven(test_IDs, task_IDs, window_size, stride, shapes=False, val_IDs=None):\n", + " if not isinstance(test_IDs, list):\n", + " raise ValueError(\"Test_IDs are not a list\")\n", + " if not isinstance(task_IDs, list):\n", + " raise ValueError(\"Task_IDs are not a list\")\n", + " # Fill data arrays\n", + " all_elem = []\n", + " for current in g.groups.keys():\n", + " c = g.get_group(current)\n", + " if 
(c.TaskID.isin(task_IDs).all()):\n", + " \n", + " new_data = c.Event.values\n", + " stepper = 0\n", + " while stepper <= (len(new_data)-window_size-1):\n", + " tmp = new_data[stepper:stepper + window_size]\n", + " x = tmp[:-1]\n", + " y = tmp[-1]\n", + " stepper += stride\n", + " \n", + " if (c.PID.isin(test_IDs).all()):\n", + " all_elem.append([\"Test\", x, y])\n", + " elif (c.PID.isin(val_IDs).all()):\n", + " all_elem.append([\"Val\", x, y])\n", + " else:\n", + " all_elem.append([\"Train\", x, y])\n", + " df_tmp = pd.DataFrame(all_elem, columns =[\"Split\", \"X\", \"Y\"])\n", + " turbo = []\n", + " for s in df_tmp.Split.unique():\n", + " dfX = df_tmp[df_tmp.Split == s]\n", + " max_amount = dfX.groupby([\"Y\"]).count().max().X\n", + " for y in dfX.Y.unique():\n", + " df_turbotmp = dfX[dfX.Y == y]\n", + " turbo.append(df_turbotmp)\n", + " turbo.append(df_turbotmp.sample(max_amount-len(df_turbotmp), replace=True))\n", + " # if len(df_turbotmp) < max_amount:\n", + "\n", + " df_tmp = pd.concat(turbo)\n", + " x_train, y_train = df_tmp[df_tmp.Split == \"Train\"].X.values, df_tmp[df_tmp.Split == \"Train\"].Y.values\n", + " x_test, y_test = df_tmp[df_tmp.Split == \"Test\"].X.values, df_tmp[df_tmp.Split == \"Test\"].Y.values\n", + " x_val, y_val = df_tmp[df_tmp.Split == \"Val\"].X.values, df_tmp[df_tmp.Split == \"Val\"].Y.values\n", + " \n", + " x_train = np.expand_dims(np.stack(x_train), axis=2)\n", + " y_train = np.array(y_train)\n", + " x_test = np.expand_dims(np.stack(x_test), axis=2)\n", + " y_test = np.array(y_test)\n", + " if len(x_val) > 0:\n", + " x_val = np.expand_dims(np.stack(x_val), axis=2)\n", + " y_val = np.array(y_val)\n", + " return(x_train, y_train, x_test, y_test, x_val, y_val)\n", + " return(x_train, y_train, x_test, y_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "b8f92bc1", + "metadata": {}, + "outputs": [], + "source": [ + "def createTrainTest(test_IDs, task_IDs, window_size, stride, shapes=False, val_IDs=None):\n", + " 
if not isinstance(test_IDs, list):\n", + " raise ValueError(\"Test_IDs are not a list\")\n", + " if not isinstance(task_IDs, list):\n", + " raise ValueError(\"Task_IDs are not a list\")\n", + " # Fill data arrays\n", + " y_train = []\n", + " x_train = []\n", + " y_test = []\n", + " x_test = []\n", + " x_val = []\n", + " y_val = []\n", + " \n", + " for current in g.groups.keys():\n", + " c = g.get_group(current)\n", + " if (c.TaskID.isin(task_IDs).all()):\n", + " \n", + " new_data = c.Event.values\n", + " stepper = 0\n", + " while stepper <= (len(new_data)-window_size-1):\n", + " tmp = new_data[stepper:stepper + window_size]\n", + " pdb.set_trace()\n", + " x = tmp[:-1]\n", + " y = tmp[-1]\n", + " stepper += stride\n", + " if (c.PID.isin(test_IDs).all()):\n", + " if y == 6:\n", + " y_test.append(y)\n", + " x_test.append(x)\n", + " y_test.append(y)\n", + " x_test.append(x)\n", + " elif (c.PID.isin(val_IDs).all()):\n", + " if y == 6:\n", + " y_val.append(y)\n", + " x_val.append(x)\n", + " y_val.append(y)\n", + " x_val.append(x)\n", + " else:\n", + " if y == 6:\n", + " y_train.append(y)\n", + " x_train.append(x)\n", + " y_train.append(y)\n", + " x_train.append(x)\n", + " x_train = np.array(x_train)\n", + " y_train = np.array(y_train)\n", + " x_test = np.array(x_test)\n", + " y_test = np.array(y_test)\n", + " x_val = np.array(x_val)\n", + " y_val = np.array(y_val)\n", + " pdb.set_trace()\n", + " if (shapes):\n", + " print(x_train.shape)\n", + " print(y_train.shape)\n", + " print(x_test.shape)\n", + " print(y_test.shape)\n", + " print(x_val.shape)\n", + " print(y_val.shape)\n", + " print(np.unique(y_test))\n", + " print(np.unique(y_train))\n", + " if len(x_val) > 0:\n", + " return(x_train, y_train, x_test, y_test, x_val, y_val)\n", + " return (x_train, y_train, x_test, y_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "e56fbc58", + "metadata": {}, + "outputs": [], + "source": [ + "maxlen = 1000\n", + "lens = []\n", + "for current in 
g.groups.keys():\n", + " c = g.get_group(current)\n", + " lens.append(len(c.Event.values))\n", + " maxlen = min(maxlen, len(c.Event.values))" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "c02cbdae", + "metadata": {}, + "outputs": [], + "source": [ + "# Number of trees in random forest\n", + "n_estimators = np.arange(5,100, 5)\n", + "# Number of features to consider at every split\n", + "max_features = ['sqrt']\n", + "# Maximum number of levels in tree\n", + "max_depth = np.arange(5,100, 5)\n", + "# Minimum number of samples required to split a node\n", + "min_samples_split = np.arange(2,10, 1)\n", + "# Minimum number of samples required at each leaf node\n", + "min_samples_leaf = np.arange(2,5, 1)\n", + "# Method of selecting samples for training each tree\n", + "bootstrap = [True, False]\n", + "\n", + "# Create the random grid\n", + "param_grid = {'n_estimators': n_estimators,\n", + " 'max_features': max_features,\n", + " 'max_depth': max_depth,\n", + " 'min_samples_split': min_samples_split,\n", + " 'min_samples_leaf': min_samples_leaf,\n", + " 'bootstrap': bootstrap}\n", + "\n", + "grid = GridSearchCV(RandomForestClassifier(), param_grid, refit = True, verbose = 0, return_train_score=True) " + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "c2bcfe7f", + "metadata": {}, + "outputs": [], + "source": [ + "def doTrainSlideWindowNoPad(currentPid):\n", + " print(f\"doTrain: {currentPid}\")\n", + " dfs = []\n", + " for window_size in range(8, 15): \n", + " (x_train, y_train, x_test, y_test) = createTrainTest([currentPid], Task_IDs, window_size, 1, False, [200])\n", + " print(f\"doTrain: created TrainTestsplit\")\n", + "\n", + " # print(\"window_size\", 5, \"PID\", currentPid, \"samples\", x_train.shape[0], \"generated_samples\", \"samples\", x_train_window.shape[0])\n", + "\n", + " grid.fit(x_train, y_train)\n", + " print(\"fitted\")\n", + " # y_pred = grid.predict(x_test)\n", + "\n", + " df_params = 
pd.DataFrame(grid.cv_results_[\"params\"])\n", + " df_params[\"Mean_test\"] = grid.cv_results_[\"mean_test_score\"]\n", + " df_params[\"Mean_train\"] = grid.cv_results_[\"mean_train_score\"]\n", + " df_params[\"STD_test\"] = grid.cv_results_[\"std_test_score\"]\n", + " df_params[\"STD_train\"] = grid.cv_results_[\"std_train_score\"]\n", + " df_params['Window_Size'] = window_size\n", + " df_params['PID'] = currentPid\n", + " # df_params[\"Accuracy\"] = accuracy_score(y_pred, y_test)\n", + " dfs.append(df_params)\n", + "\n", + " return pd.concat(dfs)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "9e3d86f1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "doTrain: 1\n", + "> \u001b[0;32m/tmp/ipykernel_90176/2602038955.py\u001b[0m(23)\u001b[0;36mcreateTrainTest\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 21 \u001b[0;31m \u001b[0mtmp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnew_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstepper\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mstepper\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mwindow_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 22 \u001b[0;31m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m---> 23 \u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 24 \u001b[0;31m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 25 \u001b[0;31m \u001b[0mstepper\u001b[0m 
\u001b[0;34m+=\u001b[0m \u001b[0mstride\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n", + "ipdb> tmp\n", + "array([4, 1, 1, 4, 4, 7, 7, 7])\n", + "ipdb> new_data\n", + "array([4, 1, 1, 4, 4, 7, 7, 7, 7, 7, 7, 4, 1, 4, 4, 4])\n", + "ipdb> current\n", + "(1.0, 1.0, 0.0)\n", + "ipdb> print(c)\n", + " Timestamp Event TaskID Part PID \\\n", + "0 1.575388e+12 4 0.0 1.0 1.0 \n", + "1 1.575388e+12 1 0.0 1.0 1.0 \n", + "2 1.575388e+12 1 0.0 1.0 1.0 \n", + "3 1.575388e+12 4 0.0 1.0 1.0 \n", + "4 1.575388e+12 4 0.0 1.0 1.0 \n", + "5 1.575388e+12 7 0.0 1.0 1.0 \n", + "6 1.575388e+12 7 0.0 1.0 1.0 \n", + "7 1.575388e+12 7 0.0 1.0 1.0 \n", + "8 1.575388e+12 7 0.0 1.0 1.0 \n", + "9 1.575388e+12 7 0.0 1.0 1.0 \n", + "10 1.575388e+12 7 0.0 1.0 1.0 \n", + "11 1.575388e+12 4 0.0 1.0 1.0 \n", + "12 1.575388e+12 1 0.0 1.0 1.0 \n", + "13 1.575388e+12 4 0.0 1.0 1.0 \n", + "14 1.575388e+12 4 0.0 1.0 1.0 \n", + "15 1.575388e+12 4 0.0 1.0 1.0 \n", + "\n", + " TextRule Rule Type \n", + "0 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "1 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "2 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "3 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "4 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "5 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "6 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "7 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "8 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "9 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "10 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "11 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "12 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Toolbar \n", + "13 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 
3.0 Cmd \n", + "14 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "15 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S... 3.0 Cmd \n", + "ipdb> print(c.TextRule)\n", + "0 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "1 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "2 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "3 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "4 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "5 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "6 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "7 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "8 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "9 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "10 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "11 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "12 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "13 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "14 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "15 {'Title': ['1', 'Indent', 'and', 'Italic'], 'S...\n", + "Name: TextRule, dtype: object\n", + "ipdb> print(c.Event)\n", + "0 4\n", + "1 1\n", + "2 1\n", + "3 4\n", + "4 4\n", + "5 7\n", + "6 7\n", + "7 7\n", + "8 7\n", + "9 7\n", + "10 7\n", + "11 4\n", + "12 1\n", + "13 4\n", + "14 4\n", + "15 4\n", + "Name: Event, dtype: int64\n", + "ipdb> val\n", + "*** NameError: name 'val' is not defined\n", + "ipdb> val_IDs\n", + "[200]\n", + "--KeyboardInterrupt--\n", + "\n", + "KeyboardInterrupt: Interrupted by user\n", + "> \u001b[0;32m/tmp/ipykernel_90176/2602038955.py\u001b[0m(22)\u001b[0;36mcreateTrainTest\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 20 \u001b[0;31m \u001b[0;32mwhile\u001b[0m \u001b[0mstepper\u001b[0m \u001b[0;34m<=\u001b[0m 
\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnew_data\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mwindow_size\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 21 \u001b[0;31m \u001b[0mtmp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnew_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstepper\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mstepper\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mwindow_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m---> 22 \u001b[0;31m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 23 \u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 24 \u001b[0;31m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n", + "--KeyboardInterrupt--\n", + "\n", + "KeyboardInterrupt: Interrupted by user\n", + "> \u001b[0;32m/tmp/ipykernel_90176/2602038955.py\u001b[0m(23)\u001b[0;36mcreateTrainTest\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 21 \u001b[0;31m \u001b[0mtmp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnew_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstepper\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mstepper\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mwindow_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 22 \u001b[0;31m 
\u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m---> 23 \u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 24 \u001b[0;31m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 25 \u001b[0;31m \u001b[0mstepper\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mstride\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n", + "ipdb> q\n" + ] + }, + { + "ename": "BdbQuit", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mBdbQuit\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/tmp/ipykernel_90176/1128965594.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mdoTrainSlideWindowNoPad\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/tmp/ipykernel_90176/2629087375.py\u001b[0m in \u001b[0;36mdoTrainSlideWindowNoPad\u001b[0;34m(currentPid)\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mdfs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mwindow_size\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m8\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;36m15\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0;34m(\u001b[0m\u001b[0mx_train\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_train\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx_test\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_test\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcreateTrainTest\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcurrentPid\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTask_IDs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwindow_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m200\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"doTrain: created TrainTestsplit\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/tmp/ipykernel_90176/2602038955.py\u001b[0m in \u001b[0;36mcreateTrainTest\u001b[0;34m(test_IDs, task_IDs, window_size, stride, shapes, val_IDs)\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mtmp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnew_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstepper\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mstepper\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mwindow_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 23\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 24\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0mstepper\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mstride\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/tmp/ipykernel_90176/2602038955.py\u001b[0m in \u001b[0;36mcreateTrainTest\u001b[0;34m(test_IDs, task_IDs, window_size, stride, shapes, val_IDs)\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mtmp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnew_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstepper\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mstepper\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mwindow_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 23\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 24\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0mstepper\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mstride\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/miniconda3/envs/intentPrediction/lib/python3.9/bdb.py\u001b[0m in 
\u001b[0;36mtrace_dispatch\u001b[0;34m(self, frame, event, arg)\u001b[0m\n\u001b[1;32m 86\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;31m# None\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 87\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mevent\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'line'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 88\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatch_line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 89\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mevent\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'call'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 90\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatch_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/miniconda3/envs/intentPrediction/lib/python3.9/bdb.py\u001b[0m in \u001b[0;36mdispatch_line\u001b[0;34m(self, frame)\u001b[0m\n\u001b[1;32m 111\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstop_here\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbreak_here\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 112\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0muser_line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 113\u001b[0;31m 
\u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mquitting\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mBdbQuit\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 114\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrace_dispatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mBdbQuit\u001b[0m: " + ] + } + ], + "source": [ + "doTrainSlideWindowNoPad(1)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_0.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_0.pkl new file mode 100644 index 0000000..8f1bd05 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_0.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_1.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_1.pkl new file mode 100644 index 0000000..9b43f98 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_1.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_2.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_2.pkl new file mode 100644 index 0000000..239664f Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_2.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_3.pkl 
b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_3.pkl new file mode 100644 index 0000000..f6bd6bd Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_3.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_4.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_4.pkl new file mode 100644 index 0000000..8167816 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_4.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_5.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_5.pkl new file mode 100644 index 0000000..7e1cb98 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_5.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_6.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_6.pkl new file mode 100644 index 0000000..af33147 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_action_id_6.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_0.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_data_0.pkl new file mode 100644 index 0000000..d7b930e Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_0.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_1.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_data_1.pkl new file mode 100644 index 0000000..c97ed45 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_1.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_2.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_data_2.pkl new file mode 100644 index 0000000..a2e8644 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_2.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_3.pkl 
b/keyboard_and_mouse/dataset/strategy_dataset/test_data_3.pkl new file mode 100644 index 0000000..9d04789 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_3.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_4.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_data_4.pkl new file mode 100644 index 0000000..69e22a3 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_4.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_5.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_data_5.pkl new file mode 100644 index 0000000..0559605 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_5.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_data_6.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_data_6.pkl new file mode 100644 index 0000000..4965b35 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_data_6.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_0.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_0.pkl new file mode 100644 index 0000000..eceb04a Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_0.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_1.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_1.pkl new file mode 100644 index 0000000..6895ad7 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_1.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_2.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_2.pkl new file mode 100644 index 0000000..c5f6d66 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_2.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_3.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_3.pkl 
new file mode 100644 index 0000000..4005ca3 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_3.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_4.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_4.pkl new file mode 100644 index 0000000..a88c8b5 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_4.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_5.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_5.pkl new file mode 100644 index 0000000..c516021 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_5.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/test_label_6.pkl b/keyboard_and_mouse/dataset/strategy_dataset/test_label_6.pkl new file mode 100644 index 0000000..b7d6c0f Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/test_label_6.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_0.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_0.pkl new file mode 100644 index 0000000..2fa023e Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_0.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_1.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_1.pkl new file mode 100644 index 0000000..68a6a1b Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_1.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_2.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_2.pkl new file mode 100644 index 0000000..8c67e8e Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_2.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_3.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_3.pkl new file mode 100644 index 0000000..5d914d1 Binary 
files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_3.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_4.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_4.pkl new file mode 100644 index 0000000..9f1090e Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_4.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_5.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_5.pkl new file mode 100644 index 0000000..cc7a9c3 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_5.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_data_6.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_data_6.pkl new file mode 100644 index 0000000..f4922e8 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_data_6.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_0.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_0.pkl new file mode 100644 index 0000000..afd9219 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_label_0.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_1.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_1.pkl new file mode 100644 index 0000000..a4e3002 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_label_1.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_2.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_2.pkl new file mode 100644 index 0000000..b0aa1b7 Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_label_2.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_3.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_3.pkl new file mode 100644 index 0000000..6bb3dea Binary files /dev/null and 
b/keyboard_and_mouse/dataset/strategy_dataset/train_label_3.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_4.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_4.pkl new file mode 100644 index 0000000..2c80a0f Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_label_4.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_5.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_5.pkl new file mode 100644 index 0000000..307553b Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_label_5.pkl differ diff --git a/keyboard_and_mouse/dataset/strategy_dataset/train_label_6.pkl b/keyboard_and_mouse/dataset/strategy_dataset/train_label_6.pkl new file mode 100644 index 0000000..e2b3b0f Binary files /dev/null and b/keyboard_and_mouse/dataset/strategy_dataset/train_label_6.pkl differ diff --git a/keyboard_and_mouse/networks.py b/keyboard_and_mouse/networks.py new file mode 100644 index 0000000..272fe8c --- /dev/null +++ b/keyboard_and_mouse/networks.py @@ -0,0 +1,167 @@ +import torch +import torch.nn as nn + +class fc_block(nn.Module): + def __init__(self, in_channels, out_channels, norm, activation_fn): + super(fc_block, self).__init__() + block = nn.Sequential() + block.add_module('linear', nn.Linear(in_channels, out_channels)) + if norm: + block.add_module('batchnorm', nn.BatchNorm1d(out_channels)) + if activation_fn is not None: + block.add_module('activation', activation_fn()) + + self.block = block + + def forward(self, x): + return self.block(x) + +class ActionDemoEncoder(nn.Module): + def __init__(self, args, pooling): + super(ActionDemoEncoder, self).__init__() + hidden_size = args.demo_hidden + self.hidden_size = hidden_size + self.bs = args.batch_size + + len_action_predicates = 35 # max_action_len + self.action_embed = nn.Embedding(len_action_predicates, hidden_size) + + feat2hidden = nn.Sequential() + feat2hidden.add_module( + 
'fc_block1', fc_block(hidden_size, hidden_size, False, nn.ReLU)) + self.feat2hidden = feat2hidden + + self.pooling = pooling + + if 'lstm' in self.pooling: + self.lstm = nn.LSTM(hidden_size, hidden_size) + + def forward(self, batch_data): + batch_data = batch_data.view(-1,1) + stacked_demo_feat = self.action_embed(batch_data) + stacked_demo_feat = self.feat2hidden(stacked_demo_feat) + batch_demo_feat = [] + start = 0 + + for length in range(0,batch_data.shape[0]): + if length == 0: + feat = stacked_demo_feat[0:1, :] + else: + feat = stacked_demo_feat[(length-1):length, :] + if len(feat.size()) == 3: + feat = feat.unsqueeze(0) + + if self.pooling == 'max': + feat = torch.max(feat, 0)[0] + elif self.pooling == 'avg': + feat = torch.mean(feat, 0) + elif self.pooling == 'lstmavg': + lstm_out, hidden = self.lstm(feat.view(len(feat), 1, -1)) + lstm_out = lstm_out.view(len(feat), -1) + feat = torch.mean(lstm_out, 0) + elif self.pooling == 'lstmlast': + lstm_out, hidden = self.lstm(feat.view(len(feat), 1, -1)) + lstm_out = lstm_out.view(len(feat), -1) + feat = lstm_out[-1] + else: + raise ValueError + + + batch_demo_feat.append(feat) + + demo_emb = torch.stack(batch_demo_feat, 0) + demo_emb = demo_emb.view(self.bs, 35, -1) + return demo_emb + +class PredicateClassifier(nn.Module): + + def __init__(self, args,): + super(PredicateClassifier, self).__init__() + hidden_size = args.demo_hidden + self.hidden_size = hidden_size + + classifier = nn.Sequential() + classifier.add_module('fc_block1', fc_block(hidden_size*35, hidden_size, False, nn.Tanh)) + classifier.add_module('dropout', nn.Dropout(args.dropout)) + classifier.add_module('fc_block2', fc_block(hidden_size, 7, False, None)) # 7 is all possible actions + + self.classifier = classifier + + def forward(self, input_emb): + input_emb = input_emb.view(-1, self.hidden_size*35) + return self.classifier(input_emb) + + +class ActionDemo2Predicate(nn.Module): + def __init__(self, args, **kwargs): + super(ActionDemo2Predicate, 
self).__init__() + + print('------------------------------------------------------------------------------------------') + print('ActionDemo2Predicate') + print('------------------------------------------------------------------------------------------') + + model_type = args.model_type + print('model_type', model_type) + + if model_type.lower() == 'max': + demo_encoder = ActionDemoEncoder(args, 'max') + elif model_type.lower() == 'avg': + demo_encoder = ActionDemoEncoder(args, 'avg') + elif model_type.lower() == 'lstmavg': + demo_encoder = ActionDemoEncoder(args, 'lstmavg') + elif model_type.lower() == 'bilstmavg': + demo_encoder = ActionDemoEncoder(args, 'bilstmavg') + elif model_type.lower() == 'lstmlast': + demo_encoder = ActionDemoEncoder(args, 'lstmlast') + elif model_type.lower() == 'bilstmlast': + demo_encoder = ActionDemoEncoder(args, 'bilstmlast') + else: + raise ValueError + demo_encoder = torch.nn.DataParallel(demo_encoder) + + predicate_decoder = PredicateClassifier(args) + + # for quick save and load + all_modules = nn.Sequential() + all_modules.add_module('demo_encoder', demo_encoder) + all_modules.add_module('predicate_decoder', predicate_decoder) + + self.demo_encoder = demo_encoder + self.predicate_decoder = predicate_decoder + self.all_modules = all_modules + self.to_cuda_fn = None + + def set_to_cuda_fn(self, to_cuda_fn): + self.to_cuda_fn = to_cuda_fn + + def forward(self, data, **kwargs): + ''' + Note: The order of the `data` won't change in this function + ''' + if self.to_cuda_fn: + data = self.to_cuda_fn(data) + + batch_demo_emb = self.demo_encoder(data) + pred = self.predicate_decoder(batch_demo_emb) + return pred + + def write_summary(self, writer, info, postfix): + model_name = 'Demo2Predicate-{}/'.format(postfix) + for k in self.summary_keys: + if k in info.keys(): + writer.scalar_summary(model_name + k, info[k]) + + def save(self, path, verbose=False): + if verbose: + print(colored('[*] Save model at {}'.format(path), 'magenta')) + 
torch.save(self.all_modules.state_dict(), path) + + def load(self, path, verbose=False): + if verbose: + print(colored('[*] Load model at {}'.format(path), 'magenta')) + self.all_modules.load_state_dict( + torch.load( + path, + map_location=lambda storage, + loc: storage)) + diff --git a/keyboard_and_mouse/process_data.py b/keyboard_and_mouse/process_data.py new file mode 100644 index 0000000..2212836 --- /dev/null +++ b/keyboard_and_mouse/process_data.py @@ -0,0 +1,207 @@ +import pickle +import pandas as pd +import numpy as np +import matplotlib.pyplot as plt +from pathlib import Path +import argparse + +def view_clean_data(): + with open('dataset/clean_data.pkl', 'rb') as f: + data = pickle.load(f) + print(type(data), len(data)) + print(data.keys()) + print('length of data:',len(data)) + print('event', data['Event'], 'length of event', len(data['Event'])) + print('rule', data['Rule'], 'length of event', len(data['Rule'])) + + print('rule unique', data.Rule.unique()) + print('task id unique', data.TaskID.unique()) + print('pid unique', data.PID.unique()) + print('event unique', data.Event.unique()) + +def split_org_data(): + # generate train, test data by split user, aggregate action sequence for next action prediction + # orignial action seq: a = [a_0 ... a_n] + # new action seq: for a: a0 = [a_0], a1 = [a_0, a_1] ... 
+ + # split original data into train and test based on user + with open('dataset/clean_data.pkl', 'rb') as f: + data = pickle.load(f) + + print('original data keys', data.keys()) + print('len of original data', len(data)) + print('rule unique', data.Rule.unique()) + print('event unique', data.Event.unique()) + + data_train = data[data['PID']<=11] + data_test = data[data['PID']>11] + print('train set len', len(data_train)) + print('test set len', len(data_test)) + + # split data by task + train_data_intent = [] + test_data_intent = [] + for i in range(7): + # 7 different rules, each as an intention + train_data_intent.append(data_train[data_train['Rule']==i]) + test_data_intent.append(data_test[data_test['Rule']==i]) + + # generate train set + max_len = 0 # max len is 35 + for i in range(7): # 7 tasks/rules + train_data = [] # [task] + train_label = [] + for u in range(1,12): + user_data = train_data_intent[i][train_data_intent[i]['PID']==u] + for j in range(1,6): # 5 parts == 5 trials + part_data = user_data[user_data['Part']==j] + for l in range(1,len(part_data['Event'])-1): + print(part_data['Event'][:l].tolist()) + train_data.append(part_data['Event'][:l].tolist()) + train_label.append(part_data['Event'].iat[l+1]) + if len(part_data['Event'])>max_len: + max_len = len(part_data['Event']) + + for k in range(len(train_data)): + while len(train_data[k])<35: + train_data[k].append(0) # padding with 0 + + print('x_len', len(train_data), type(train_data[0]), len(train_data[0])) + print('y_len', len(train_label), type(train_label[0])) + + Path("dataset/strategy_dataset").mkdir(parents=True, exist_ok=True) + with open('dataset/strategy_dataset/train_label_'+str(i)+'.pkl', 'wb') as f: + pickle.dump(train_label, f) + with open('dataset/strategy_dataset/train_data_'+str(i)+'.pkl', 'wb') as f: + pickle.dump(train_data, f) + print('max_len', max_len) + + # generate test set + max_len = 0 # max len is 33, total max is 35 + for i in range(7): # 7 tasks/rules + test_data = [] # 
[task][user] + test_label = [] + test_action_id = [] + for u in range(12,17): + user_data = test_data_intent[i][test_data_intent[i]['PID']==u] + test_data_user = [] + test_label_user = [] + test_action_id_user = [] + for j in range(1,6): # 5 parts == 5 trials + part_data = user_data[user_data['Part']==j] + + for l in range(1,len(part_data['Event'])-1): + test_data_user.append(part_data['Event'][:l].tolist()) + test_label_user.append(part_data['Event'].iat[l+1]) + test_action_id_user.append(part_data['Part'].iat[l]) + + if len(part_data['Event'])>max_len: + max_len = len(part_data['Event']) + + for k in range(len(test_data_user)): + while len(test_data_user[k])<35: + test_data_user[k].append(0) # padding with 0 + + test_data.append(test_data_user) + test_label.append(test_label_user) + test_action_id.append(test_action_id_user) + + + print('x_len', len(test_data), type(test_data[0]), len(test_data[0])) + print('y_len', len(test_label), type(test_label[0])) + with open('dataset/strategy_dataset/test_label_'+str(i)+'.pkl', 'wb') as f: + pickle.dump(test_label, f) + with open('dataset/strategy_dataset/test_data_'+str(i)+'.pkl', 'wb') as f: + pickle.dump(test_data, f) + with open('dataset/strategy_dataset/test_action_id_'+str(i)+'.pkl', 'wb') as f: + pickle.dump(test_action_id, f) + print('max_len', max_len) + +def calc_gt_prob(): + # train set unique label + for i in range(7): + with open('dataset/strategy_dataset/train_label_'+str(i)+'.pkl', 'rb') as f: + y = pickle.load(f) + y = np.array(y) + print('task ', i) + print('unique train label', np.unique(y)) + +def plot_gt_dist(): + + full_data = [] + for i in range(7): + with open('dataset/strategy_dataset/' + 'test' + '_label_' + str(i) + '.pkl', 'rb') as f: + data = pickle.load(f) + #print(len(data)) + full_data.append(data) + + fig, axs = plt.subplots(7) + fig.set_figheight(10) + fig.set_figwidth(16) + act_name = ["Italic", "Bold", "Underline", "Indent", "Align", "FontSize", "FontFamily"] + x = np.arange(7) + + width 
= 0.1 + for i in range(7): + for u in range(len(data)): # 5 users + values, counts = np.unique(full_data[i][u], return_counts=True) + counts_vis = [0]*7 + for j in range(len(values)): + counts_vis[values[j]-1] = counts[j] + print('task', i, 'actions', values, 'num', counts) + + axs[i].set_title('Intention '+str(i)) + axs[i].set_xlabel('action id') + axs[i].set_ylabel('num of actions') + axs[i].bar(x+u*width, counts_vis, width=0.1, label='user '+str(u)) + axs[i].set_xticks(np.arange(len(x))) + axs[i].set_xticklabels(act_name) + axs[i].set_ylim([0,80]) + + axs[0].legend(loc='upper right', ncol=1) + plt.tight_layout() + plt.savefig('dataset/'+'test'+'_gt_dist.png') + plt.show() + +def plot_act(): + full_data = [] + for i in range(7): + with open('dataset/strategy_dataset/' + 'test' + '_label_' + str(i) + '.pkl', 'rb') as f: + data = pickle.load(f) + full_data.append(data) + + width = 0.1 + for i in range(7): + fig, axs = plt.subplots(5) + fig.set_figheight(10) + fig.set_figwidth(16) + act_name = ["Italic", "Bold", "Underline", "Indent", "Align", "FontSize", "FontFamily"] + for u in range(len(full_data[i])): # 5 users + x = np.arange(len(full_data[i][u])) + axs[u].set_xlabel('action id') + axs[u].set_ylabel('num of actions') + axs[u].plot(x, full_data[i][u]) + + axs[0].legend(loc='upper right', ncol=1) + plt.tight_layout() + #plt.savefig('test'+'_act.png') + plt.show() + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("func", help="select what function to run. view_clean_data, split_org_data, calc_gt_prob, plot_gt_dist, plot_act", type=str) + args = parser.parse_args() + + if args.func == 'view_clean_data': + view_clean_data() # view original keyboad and mouse interaction dataset + if args.func == 'split_org_data': + split_org_data() # split the original keyboad and mouse interaction dataset. 
User 1-11 for training, rest for testing + if args.func == 'calc_gt_prob': + calc_gt_prob() # see unique label in train set + if args.func == 'plot_gt_dist': + plot_gt_dist() # plot the label distribution of test set + if args.func == 'plot_act': + plot_act() # plot the label of test set + + + diff --git a/keyboard_and_mouse/sampler_single_act.py b/keyboard_and_mouse/sampler_single_act.py new file mode 100644 index 0000000..d0c002e --- /dev/null +++ b/keyboard_and_mouse/sampler_single_act.py @@ -0,0 +1,56 @@ +import numpy as np +from numpy import genfromtxt +import csv +import pandas +from pathlib import Path +import argparse + +def sample_single_act(pred_path, save_path, j): + data = pandas.read_csv(pred_path).values + total_data = [] + + for u in range(1,6): + act_data = data[data[:,1]==u] + final_save_path = save_path + "/rate_" + str(j) + "_act_" + str(int(u)) + "_pred.csv" + head = [] + for r in range(7): + head.append('act'+str(r+1)) + head.append('task_name') + head.append('gt') + head.insert(0,'action_id') + pandas.DataFrame(act_data[:,1:]).to_csv(final_save_path, header=head) + + +def main(): + # parsing parameters + parser = argparse.ArgumentParser(description='') + parser.add_argument('--batch_size', type=int, default=8, help='batch size') + parser.add_argument('--lr', type=float, default=1e-4, help='learning rate') + parser.add_argument('--hidden_size', type=int, default=64, help='hidden_size') + parser.add_argument('--model_type', type=str, default='lstmlast', help='model type') + + args = parser.parse_args() + + task = np.arange(7) + user_num = 5 + bs = args.batch_size + lr = args.lr # 1e-4 + hs = args.hidden_size #128 + model_type = args.model_type #'lstmlast' + + rate = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + + for i in task: + for j in rate: + for l in range(user_num): + pred_path = "prediction/task" + str(i) + "/" + model_type + "_bs_" + str(bs) + "_lr_" + str(lr) + "_hidden_size_" + str(hs) + "/user" + str(l) + "_rate_" + str(j) + "_pred.csv" 
+ if j == 100: + pred_path = "prediction/task" + str(i) + "/" + model_type + "_bs_" + str(bs) + "_lr_" + str(lr) + "_hidden_size_" + str(hs) + "/user" + str(l) + "_pred.csv" + save_path = "prediction/single_act/task" + str(i) + "/" + model_type + "_bs_" + str(bs) + "_lr_" + str(lr) + "_hidden_size_" + str(hs) + "/user" + str(l) + Path(save_path).mkdir(parents=True, exist_ok=True) + data = sample_single_act(pred_path, save_path, j) + +if __name__ == '__main__': + # split the prediction by action sequence id, from 10% to 90% + main() + diff --git a/keyboard_and_mouse/sampler_single_act.sh b/keyboard_and_mouse/sampler_single_act.sh new file mode 100644 index 0000000..e86d2a3 --- /dev/null +++ b/keyboard_and_mouse/sampler_single_act.sh @@ -0,0 +1,5 @@ +python3 sampler_single_act.py \ +--batch_size 8 \ +--lr 1e-4 \ +--model_type lstmlast \ +--hidden_size 128 diff --git a/keyboard_and_mouse/sampler_user.py b/keyboard_and_mouse/sampler_user.py new file mode 100644 index 0000000..5967ada --- /dev/null +++ b/keyboard_and_mouse/sampler_user.py @@ -0,0 +1,68 @@ +import numpy as np +from numpy import genfromtxt +import csv +import pandas +import argparse + +def sample_predciton(path, rate): + data = pandas.read_csv(path).values + task_list = [0, 1, 2, 3, 4, 5, 6] + + start = 0 + stop = 0 + num_unique = np.unique(data[:,1]) + + samples = [] + for j in task_list: + for i in num_unique: + inx = np.where((data[:,1] == i) & (data[:,-2] == j)) + samples.append(data[inx]) + + for i in range(len(samples)): + n = int(len(samples[i])*(100-rate)/100) + if n == 0: + n = 1 + samples[i] = samples[i][:-n] + if len(samples[i]) == 0: + print('len of after sampling',len(samples[i])) + + return np.vstack(samples) + +def main(): + # parsing parameters + parser = argparse.ArgumentParser(description='') + parser.add_argument('--batch_size', type=int, default=8, help='batch size') + parser.add_argument('--lr', type=float, default=1e-4, help='learning rate') + parser.add_argument('--hidden_size', 
type=int, default=128, help='hidden_size') + parser.add_argument('--model_type', type=str, default='lstmlast', help='model type') + + args = parser.parse_args() + + task = np.arange(7) + user_num = 5 + bs = args.batch_size + lr = args.lr # 1e-4 + hs = args.hidden_size #128 + model_type = args.model_type #'lstmlast' + + rate = [10, 20, 30, 40, 50, 60, 70, 80, 90] + + for i in task: + for j in rate: + for l in range(user_num): + pred_path = "prediction/task" + str(i) + "/" + model_type + "_bs_" + str(bs) + "_lr_" + str(lr) + "_hidden_size_" + str(hs) + "/user" + str(l) + "_pred.csv" + save_path = "prediction/task" + str(i) + "/" + model_type + "_bs_" + str(bs) + "_lr_" + str(lr) + "_hidden_size_" + str(hs) + "/user" + str(l) + "_rate_" + str(j) + "_pred.csv" + data = sample_predciton(pred_path, j) + + head = [] + for r in range(7): + head.append('act'+str(r+1)) + head.append('task_name') + head.append('gt') + head.insert(0,'action_id') + pandas.DataFrame(data[:,1:]).to_csv(save_path, header=head) + +if __name__ == '__main__': + # split the prediction by length, from 10% to 90% + main() + diff --git a/keyboard_and_mouse/sampler_user.sh b/keyboard_and_mouse/sampler_user.sh new file mode 100644 index 0000000..7fe93c2 --- /dev/null +++ b/keyboard_and_mouse/sampler_user.sh @@ -0,0 +1,5 @@ +python3 sampler_user.py \ +--batch_size 8 \ +--lr 1e-4 \ +--model_type lstmlast \ +--hidden_size 128 diff --git a/keyboard_and_mouse/stan/plot_user.py b/keyboard_and_mouse/stan/plot_user.py new file mode 100644 index 0000000..a49db34 --- /dev/null +++ b/keyboard_and_mouse/stan/plot_user.py @@ -0,0 +1,88 @@ +import numpy as np +from numpy import genfromtxt +import matplotlib.pyplot as plt +from pathlib import Path +import argparse + + +def main(): + parser = argparse.ArgumentParser(description='') + parser.add_argument('--batch_size', type=int, default=8, help='batch size') + parser.add_argument('--lr', type=float, default=1e-4, help='learning rate') + 
parser.add_argument('--hidden_size', type=int, default=128, help='hidden_size') + parser.add_argument('--model_type', type=str, default='lstmlast', help='model type') + parser.add_argument('--N', type=int, default=1, help='number of sequence for inference') + parser.add_argument('--user', type=int, default=1, help='number of users') + + args = parser.parse_args() + plot_type = 'bar' # line bar + width = [-0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3] + + # read data + user_data_list = [] + for i in range(args.user): + model_data_list = [] + path = "result/"+"N"+ str(args.N) + "/" + args.model_type + "bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_result_user" + str(i) +".csv" + data = genfromtxt(path, delimiter=',', skip_header =1) + for j in range(7): + data_temp = data[[1+7*j+j,2+7*j+j,3+7*j+j,4+7*j+j,5+7*j+j,6+7*j+j,7+7*j+j],:][:,[2,4,6,7]] + model_data_list.append(data_temp) + model_data_list = np.concatenate(model_data_list, axis=0) + user_data_list.append(model_data_list) + + color = ['royalblue', 'lightgreen', 'tomato', 'indigo', 'plum', 'darkorange', 'blue'] + legend = ['rule 1', 'rule 2', 'rule 3', 'rule 4', 'rule 5', 'rule 6', 'rule 7'] + fig, axs = plt.subplots(7, sharex=True, sharey=True) + fig.set_figheight(14) + fig.set_figwidth(25) + + for ax in range(7): + y_total = [] + y_low_total = [] + y_high_total = [] + for j in range(7): + y= [] + y_low = [] + y_high = [] + for i in range(len(user_data_list)): + y.append(user_data_list[i][j+ax*7][0]) + y_low.append(user_data_list[i][j+ax*7][2]) + y_high.append(user_data_list[i][j+ax*7][3]) + y_total.append(y) + y_low_total.append(y_low) + y_high_total.append(y_high) + print() + print("user mean of mean prob: ", np.mean(y)) + print("user mean of sd prob: ", np.std(y)) + + for i in range(7): + if plot_type == 'line': + axs[ax].plot(range(args.user), y_total[i], color=color[i], label=legend[i]) + axs[ax].fill_between(range(args.user), y_low_total[i], 
y_high_total[i], color=color[i],alpha=0.3 ) + if plot_type == 'bar': + width = [-0.36, -0.24, -0.12, 0, 0.12, 0.24, 0.36] + yerror = [np.array(y_total[i])-np.array(y_low_total[i]), np.array(y_high_total[i])-np.array(y_total[i])] + axs[ax].bar(np.arange(args.user)+width[i], y_total[i], width=0.08, yerr=yerror, label=legend[i], color=color[i]) + axs[ax].tick_params(axis='x', which='both', length=0) + axs[ax].set_ylabel('prob', fontsize=22) + for k,x in enumerate(np.arange(args.user)+width[i]): + y = y_total[i][k] + yerror[1][k] + axs[ax].annotate(f'{y_total[i][k]:.2f}', (x, y), textcoords='offset points', xytext=(-18,3), fontsize=16) + + axs[0].text(-0.1, 0.9, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 22) # all: -0.3,0.5 3rows: -0.5,0.5 + axs[ax].text(-0.1, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 22, color=color[ax]) + axs[ax].tick_params(axis='both', which='major', labelsize=16) + + plt.xticks(range(args.user),('1', '2', '3', '4', '5')) + plt.xlabel('user', fontsize= 22) + handles, labels = axs[0].get_legend_handles_labels() + plt.ylim([0, 1]) + Path("figure").mkdir(parents=True, exist_ok=True) + if plot_type == 'line': + plt.savefig("figure/"+"N"+ str(args.N) + "_ "+ args.model_type + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_line.png", bbox_inches='tight') + if plot_type == 'bar': + plt.savefig("figure/"+"N"+ str(args.N) + "_ "+ args.model_type + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_bar.png", bbox_inches='tight') + plt.show() + +if __name__ == '__main__': + main() diff --git a/keyboard_and_mouse/stan/plot_user.sh b/keyboard_and_mouse/stan/plot_user.sh new file mode 100644 index 0000000..1d175ba --- /dev/null +++ b/keyboard_and_mouse/stan/plot_user.sh @@ -0,0 
+1,8 @@ +python3 plot_user.py \ +--model_type lstmlast_ \ +--batch_size 8 \ +--lr 1e-4 \ +--hidden_size 128 \ +--N 1 \ +--user 5 + diff --git a/keyboard_and_mouse/stan/plot_user_all_individual.py b/keyboard_and_mouse/stan/plot_user_all_individual.py new file mode 100644 index 0000000..e721845 --- /dev/null +++ b/keyboard_and_mouse/stan/plot_user_all_individual.py @@ -0,0 +1,99 @@ +import numpy as np +from numpy import genfromtxt +import matplotlib.pyplot as plt +import argparse + +def main(): + parser = argparse.ArgumentParser(description='') + parser.add_argument('--batch_size', type=int, default=8, help='batch size') + parser.add_argument('--lr', type=float, default=1e-4, help='learning rate') + parser.add_argument('--hidden_size', type=int, default=128, help='hidden_size') + parser.add_argument('--model_type', type=str, default='lstmlast', help='model type') + parser.add_argument('--N', type=int, default=1, help='number of sequence for inference') + parser.add_argument('--user', type=int, default=1, help='number of users') + + args = parser.parse_args() + plot_type = 'bar' # line bar + act_series = 5 + + # read data + plot_list = [] + for act in range(1,act_series+1): + user_data_list = [] + for i in range(args.user): + model_data_list = [] + path = "result/"+"N"+ str(args.N) + "/" + args.model_type + "bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_result_user" + str(i) + "_rate__100" + "_act_" + str(act) +".csv" + data = genfromtxt(path, delimiter=',', skip_header =1) + for j in range(7): + data_temp = data[[1+7*j+j,2+7*j+j,3+7*j+j,4+7*j+j,5+7*j+j,6+7*j+j,7+7*j+j],:][:,[2,4,6,7]] + model_data_list.append(data_temp) + model_data_list = np.concatenate(model_data_list, axis=0) + print(model_data_list.shape) + user_data_list.append(model_data_list) + + color = ['royalblue', 'lightgreen', 'tomato', 'indigo', 'plum', 'darkorange', 'blue'] + legend = ['rule 1', 'rule 2', 'rule 3', 'rule 4', 'rule 
5', 'rule 6', 'rule 7'] + fig, axs = plt.subplots(7, sharex=True, sharey=True) + fig.set_figheight(14) + fig.set_figwidth(25) + + for ax in range(7): + y_total = [] + y_low_total = [] + y_high_total = [] + for j in range(7): + y= [] + y_low = [] + y_high = [] + for i in range(len(user_data_list)): + y.append(user_data_list[i][j+ax*7][0]) + y_low.append(user_data_list[i][j+ax*7][2]) + y_high.append(user_data_list[i][j+ax*7][3]) + y_total.append(y) + y_low_total.append(y_low) + y_high_total.append(y_high) + print() + print("user mean of mean prob: ", np.mean(y)) + print("user mean of sd prob: ", np.std(y)) + + for i in range(7): + if plot_type == 'line': + axs[ax].plot(range(args.user), y_total[i], color=color[i], label=legend[i]) + axs[ax].fill_between(range(args.user), y_low_total[i], y_high_total[i], color=color[i],alpha=0.3 ) + if plot_type == 'bar': + width = [-0.36, -0.24, -0.12, 0, 0.12, 0.24, 0.36] + yerror = [np.array(y_total[i])-np.array(y_low_total[i]), np.array(y_high_total[i])-np.array(y_total[i])] + axs[ax].bar(np.arange(args.user)+width[i], y_total[i], width=0.08, yerr=[np.array(y_total[i])-np.array(y_low_total[i]), np.array(y_high_total[i])-np.array(y_total[i])], label=legend[i], color=color[i]) + axs[ax].tick_params(axis='x', which='both', length=0) + axs[ax].set_ylabel('prob', fontsize=36) # was 22, + for k,x in enumerate(np.arange(args.user)+width[i]): + y = y_total[i][k] + yerror[1][k] + axs[ax].annotate(f'{y_total[i][k]:.2f}', (x, y), textcoords='offset points', xytext=(-18,3), fontsize=16) #was 16 + + axs[0].text(-0.17, 1.2, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 46) # was -0.1 0.9 25 + axs[ax].text(-0.17, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 46, color=color[ax]) # was 25 + axs[ax].tick_params(axis='both', which='major', labelsize=42) # was 18 + for tick in axs[ax].xaxis.get_major_ticks(): + 
tick.set_pad(20) + + plt.xticks(range(args.user),('1', '2', '3', '4', '5')) + plt.xlabel('user', fontsize= 42) # was 22 + handles, labels = axs[0].get_legend_handles_labels() + + plt.ylim([0, 1]) + plt.tight_layout() + if plot_type == 'line': + plt.savefig("figure/"+"N"+ str(args.N) + "_ "+ args.model_type + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_act_series" + str(act) + "_line_all_individual.png", bbox_inches='tight') + if plot_type == 'bar': + plt.savefig("figure/"+"N"+ str(args.N) + "_ "+ args.model_type + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_act_series" + str(act) + "_bar_all_individual.png", bbox_inches='tight') + + if plot_type == 'line': + plt.savefig("figure/"+"N"+ str(args.N) + "_ "+ args.model_type + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_act_series" + str(act) + "_line_all_individual.eps", bbox_inches='tight', format='eps') + if plot_type == 'bar': + plt.savefig("figure/"+"N"+ str(args.N) + "_ "+ args.model_type + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr) + '_hidden_size_' + str(args.hidden_size) + '_N' + str(args.N) + "_act_series" + str(act) + "_bar_all_individual.eps", bbox_inches='tight', format='eps') + #plt.show() + +if __name__ == '__main__': + main() + + diff --git a/keyboard_and_mouse/stan/plot_user_all_individual.sh b/keyboard_and_mouse/stan/plot_user_all_individual.sh new file mode 100644 index 0000000..6bdc59b --- /dev/null +++ b/keyboard_and_mouse/stan/plot_user_all_individual.sh @@ -0,0 +1,8 @@ +python3 plot_user_all_individual.py \ +--model_type lstmlast_ \ +--batch_size 8 \ +--lr 1e-4 \ +--hidden_size 128 \ +--N 1 \ +--user 5 + diff --git a/keyboard_and_mouse/stan/plot_user_all_individual_chiw.py b/keyboard_and_mouse/stan/plot_user_all_individual_chiw.py new file mode 100644 index 
import numpy as np
from numpy import genfromtxt
import matplotlib.pyplot as plt

# Standalone (no-argparse) CHIW variant of plot_user_all_individual.py:
# for every action sequence, plot each user's inferred intention
# probabilities (posterior mean with 5%/95% interval) as a stack of seven
# bar (or line) charts, one row per true intention ("rule").
model_type = "lstmlast_"
batch_size = 8
lr = 1e-4
hidden_size = 128
N = 1
user = 5
plot_type = 'bar'  # one of: 'line', 'bar'
act_series = 5

plot_list = []  # kept from the original script; not used below
for act in range(1, act_series + 1):
    # ---- load the Stan summary CSV of every user for this sequence ----
    user_data_list = []
    for u in range(user):
        csv_path = ("result/" + "N" + str(N) + "/" + model_type
                    + "bs_" + str(batch_size) + '_lr_' + str(lr)
                    + '_hidden_size_' + str(hidden_size) + '_N' + str(N)
                    + "_result_user" + str(u) + "_rate__100"
                    + "_act_" + str(act) + ".csv")
        data = genfromtxt(csv_path, delimiter=',', skip_header=1)
        # Per fit j, take its 7 probability rows (the +j skips the extra
        # header/lp__ row between stacked fits) and the columns
        # [mean, ., q5, q95] -> indices [2, 4, 6, 7].
        per_fit = [data[[1 + 7 * j + j, 2 + 7 * j + j, 3 + 7 * j + j,
                         4 + 7 * j + j, 5 + 7 * j + j, 6 + 7 * j + j,
                         7 + 7 * j + j], :][:, [2, 4, 6, 7]]
                   for j in range(7)]
        stacked = np.concatenate(per_fit, axis=0)
        print(stacked.shape)
        user_data_list.append(stacked)

    color = ['royalblue', 'lightgreen', 'tomato', 'indigo', 'plum',
             'darkorange', 'blue']
    legend = ['rule 1', 'rule 2', 'rule 3', 'rule 4', 'rule 5',
              'rule 6', 'rule 7']
    fig, axs = plt.subplots(7, sharex=True, sharey=True)
    fig.set_figheight(14)
    fig.set_figwidth(25)

    for ax in range(7):  # subplot row == true intention
        y_total, y_low_total, y_high_total = [], [], []
        for j in range(7):  # inferred intention within that row
            row = j + ax * 7
            y = [u_dat[row][0] for u_dat in user_data_list]
            y_low = [u_dat[row][2] for u_dat in user_data_list]
            y_high = [u_dat[row][3] for u_dat in user_data_list]
            y_total.append(y)
            y_low_total.append(y_low)
            y_high_total.append(y_high)
            print()
            print(legend[ax])
            print("user mean of mean prob: ", np.mean(y))
            print("user mean of sd prob: ", np.std(y))

        for i in range(7):
            if plot_type == 'line':
                axs[ax].plot(range(user), y_total[i], color=color[i],
                             label=legend[i])
                axs[ax].fill_between(range(user), y_low_total[i],
                                     y_high_total[i], color=color[i],
                                     alpha=0.3)
            if plot_type == 'bar':
                width = [-0.36, -0.24, -0.12, 0, 0.12, 0.24, 0.36]
                # asymmetric error bars: distance from mean to q5 / q95
                err_low = np.array(y_total[i]) - np.array(y_low_total[i])
                err_high = np.array(y_high_total[i]) - np.array(y_total[i])
                yerror = [err_low, err_high]
                axs[ax].bar(np.arange(user) + width[i], y_total[i],
                            width=0.08, yerr=[err_low, err_high],
                            label=legend[i], color=color[i])
                axs[ax].tick_params(axis='x', which='both', length=0)
                axs[ax].set_ylabel('prob', fontsize=26)
                axs[ax].set_title(legend[ax], color=color[ax], fontsize=26)
                for k, x_pos in enumerate(np.arange(user) + width[i]):
                    y_top = y_total[i][k] + yerror[1][k]
                    axs[ax].annotate(f'{y_total[i][k]:.2f}', (x_pos, y_top),
                                     textcoords='offset points',
                                     xytext=(-18, 3), fontsize=16)

        axs[ax].tick_params(axis='both', which='major', labelsize=18)
        for tick in axs[ax].xaxis.get_major_ticks():
            tick.set_pad(20)

    plt.xticks(range(user), ('1', '2', '3', '4', '5'))
    plt.xlabel('user', fontsize=26)
    handles, labels = axs[0].get_legend_handles_labels()

    plt.ylim([0, 1.2])
    plt.tight_layout()
    # Output file stem is kept byte-identical to the original script.
    stem = ("figure/" + "N" + str(N) + "_ " + model_type + "_bs_"
            + str(batch_size) + '_lr_' + str(lr) + '_hidden_size_'
            + str(hidden_size) + '_N' + str(N) + "_act_series" + str(act))
    if plot_type == 'line':
        plt.savefig(stem + "_line_all_individual_chiw.png",
                    bbox_inches='tight')
    if plot_type == 'bar':
        plt.savefig(stem + "_bar_all_individual_chiw.png",
                    bbox_inches='tight')
    # plt.show()
    if plot_type == 'line':
        plt.savefig(stem + "_line_all_individual_chiw.eps",
                    bbox_inches='tight', format='eps')
    if plot_type == 'bar':
        plt.savefig(stem + "_bar_all_individual_chiw.eps",
                    bbox_inches='tight', format='eps')
skip_header =1) + for j in range(7): + data_temp = data[[1+7*j+j,2+7*j+j,3+7*j+j,4+7*j+j,5+7*j+j,6+7*j+j,7+7*j+j],:][:,[2,4,6,7]] + model_data_list.append(data_temp) + model_data_list = np.concatenate(model_data_list, axis=0) + if i == 4: + print(model_data_list.shape, model_data_list) + user_data_list.append(model_data_list) + model_data_list_total = np.stack(user_data_list) + print(model_data_list_total.shape) + mean_user_data = np.mean(model_data_list_total,axis=0) + print(mean_user_data.shape) + rate_user_data_list.append(mean_user_data) + + + color = ['royalblue', 'lightgreen', 'tomato', 'indigo', 'plum', 'darkorange', 'blue'] + legend = ['rule 1', 'rule 2', 'rule 3', 'rule 4', 'rule 5', 'rule 6', 'rule 7'] + fig, axs = plt.subplots(7, sharex=True, sharey=True) + fig.set_figheight(10) # all sample rate: 10; 3 row: 8 + fig.set_figwidth(20) + + for ax in range(7): + y_total = [] + y_low_total = [] + y_high_total = [] + for j in range(7): + y= [] + y_low = [] + y_high = [] + for i in range(len(rate_user_data_list)): + y.append(rate_user_data_list[i][j+ax*7][0]) + y_low.append(rate_user_data_list[i][j+ax*7][2]) + y_high.append(rate_user_data_list[i][j+ax*7][3]) + y_total.append(y) + y_low_total.append(y_low) + y_high_total.append(y_high) + print() + print("user mean of mean prob: ", np.mean(y)) + print("user mean of sd prob: ", np.std(y)) + + for i in range(7): + axs[ax].plot(range(0,101,10), y_total[i], color=color[i], label=legend[i]) + axs[ax].fill_between(range(0,101,10), y_low_total[i], y_high_total[i], color=color[i],alpha=0.3 ) + axs[ax].set_xticks(range(0,101,10)) + axs[ax].set_ylabel('prob', fontsize=20) + + + axs[0].text(-0.125, 0.9, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 20) + axs[ax].text(-0.125, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 20, color=color[ax]) + axs[ax].tick_params(axis='both', which='major', 
import numpy as np
from numpy import genfromtxt
import matplotlib.pyplot as plt
import argparse

def main():
    """Plot intention-inference results averaged over users for 0%..100%
    of the observed actions, one figure per individual action sequence.

    Reads the Stan summary CSVs produced by
    strategy_inference_test_all_individual_act.R and saves one PNG per
    action sequence under figure/.
    """
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--batch_size', type=int, default=8, help='batch size')
    parser.add_argument('--lr', type=float, default=1e-4, help='learning rate')
    parser.add_argument('--hidden_size', type=int, default=128, help='hidden_size')
    parser.add_argument('--model_type', type=str, default='lstmlast', help='model type')
    parser.add_argument('--N', type=int, default=1, help='number of sequence for inference')
    parser.add_argument('--user', type=int, default=1, help='number of users')

    args = parser.parse_args()
    width = [-0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3]  # kept for parity with sibling scripts (unused here)
    act_series = 5

    for act in range(1, act_series + 1):
        # ---- load and average the per-user summaries for every rate ----
        rate_user_data_list = []
        for r in range(0, 101, 10):
            user_data_list = []
            for u in range(args.user):
                csv_path = ("result/" + "N" + str(args.N) + "/" + args.model_type
                            + "bs_" + str(args.batch_size) + '_lr_' + str(args.lr)
                            + '_hidden_size_' + str(args.hidden_size)
                            + '_N' + str(args.N) + "_result_user" + str(u)
                            + "_rate__" + str(r) + "_act_" + str(act) + ".csv")
                data = genfromtxt(csv_path, delimiter=',', skip_header=1)
                # Per fit j: 7 probability rows (the +j skips the header/lp__
                # row between stacked fits), columns [mean, ., q5, q95].
                per_fit = [data[[1 + 7 * j + j, 2 + 7 * j + j, 3 + 7 * j + j,
                                 4 + 7 * j + j, 5 + 7 * j + j, 6 + 7 * j + j,
                                 7 + 7 * j + j], :][:, [2, 4, 6, 7]]
                           for j in range(7)]
                user_data_list.append(np.concatenate(per_fit, axis=0))
            # mean over users at this observation rate
            rate_user_data_list.append(np.mean(np.stack(user_data_list), axis=0))

        color = ['royalblue', 'lightgreen', 'tomato', 'indigo', 'plum',
                 'darkorange', 'blue']
        legend = ['rule 1', 'rule 2', 'rule 3', 'rule 4', 'rule 5',
                  'rule 6', 'rule 7']
        fig, axs = plt.subplots(7, sharex=True, sharey=True)
        fig.set_figheight(14)
        fig.set_figwidth(20)

        for ax in range(7):  # subplot row == true intention
            y_total, y_low_total, y_high_total = [], [], []
            for j in range(7):
                row = j + ax * 7
                y = [m[row][0] for m in rate_user_data_list]
                y_low = [m[row][2] for m in rate_user_data_list]
                y_high = [m[row][3] for m in rate_user_data_list]
                y_total.append(y)
                y_low_total.append(y_low)
                y_high_total.append(y_high)
                print()
                print("user mean of mean prob: ", np.mean(y))
                print("user mean of sd prob: ", np.std(y))

            for i in range(7):
                axs[ax].plot(range(0, 101, 10), y_total[i], color=color[i],
                             label=legend[i])
                axs[ax].fill_between(range(0, 101, 10), y_low_total[i],
                                     y_high_total[i], color=color[i],
                                     alpha=0.3)
                axs[ax].set_xticks(range(0, 101, 10))
                axs[ax].set_ylabel('prob', fontsize=26)

            axs[0].text(-0.15, 1.2, 'True Intention:',
                        horizontalalignment='center',
                        verticalalignment='center',
                        transform=axs[0].transAxes, fontsize=36)
            axs[ax].text(-0.15, 0.5, legend[ax],
                         horizontalalignment='center',
                         verticalalignment='center',
                         transform=axs[ax].transAxes, fontsize=36,
                         color=color[ax])
            axs[ax].tick_params(axis='y', which='major', labelsize=24)
            axs[ax].tick_params(axis='x', which='major', labelsize=24)
            for tick in axs[ax].xaxis.get_major_ticks():
                tick.set_pad(20)

        plt.xlabel('Percentage of occurred actions in one action sequence',
                   fontsize=36)
        handles, labels = axs[0].get_legend_handles_labels()

        plt.xlim([0, 101])
        plt.ylim([0, 1])

        # NOTE(review): "_rate_ful_all_individuall" looks like a typo of
        # "_rate_full_all_individual" but is preserved so existing
        # consumers of these files keep working.
        plt.savefig("figure/" + "N" + str(args.N) + "_ " + args.model_type
                    + "_bs_" + str(args.batch_size) + '_lr_' + str(args.lr)
                    + '_hidden_size_' + str(args.hidden_size)
                    + '_N' + str(args.N) + "_act_series" + str(act)
                    + "_rate_ful_all_individuall.png", bbox_inches='tight')

        # plt.show()

if __name__ == '__main__':
    main()
in range(1,act_series+1): + rate_user_data_list = [] + for r in range(0,101,10): + # read data + print(r) + user_data_list = [] + for i in range(user): + model_data_list = [] + path = "result/"+"N"+ str(N) + "/" + model_type + "bs_" + str(batch_size) + '_lr_' + str(lr) + '_hidden_size_' + str(hidden_size) + '_N' + str(N) + "_result_user" + str(i) + "_rate__" + str(r) + "_act_" + str(act) +".csv" + data = genfromtxt(path, delimiter=',', skip_header =1) + for j in range(7): + data_temp = data[[1+7*j+j,2+7*j+j,3+7*j+j,4+7*j+j,5+7*j+j,6+7*j+j,7+7*j+j],:][:,[2,4,6,7]] + model_data_list.append(data_temp) + model_data_list = np.concatenate(model_data_list, axis=0) + user_data_list.append(model_data_list) + model_data_list_total = np.stack(user_data_list) + print(model_data_list_total.shape) + mean_user_data = np.mean(model_data_list_total,axis=0) + print(mean_user_data.shape) + rate_user_data_list.append(mean_user_data) + + color = ['royalblue', 'lightgreen', 'tomato', 'indigo', 'plum', 'darkorange', 'blue'] + legend = ['rule 1', 'rule 2', 'rule 3', 'rule 4', 'rule 5', 'rule 6', 'rule 7'] + fig, axs = plt.subplots(7, sharex=True, sharey=True) + fig.set_figheight(14) # was 10 + fig.set_figwidth(20) + + for ax in range(7): + y_total = [] + y_low_total = [] + y_high_total = [] + for j in range(7): + y= [] + y_low = [] + y_high = [] + for i in range(len(rate_user_data_list)): + y.append(rate_user_data_list[i][j+ax*7][0]) + y_low.append(rate_user_data_list[i][j+ax*7][2]) + y_high.append(rate_user_data_list[i][j+ax*7][3]) + y_total.append(y) + y_low_total.append(y_low) + y_high_total.append(y_high) + print() + print(legend[ax]) + print("user mean of mean prob: ", np.mean(y)) + print("user mean of sd prob: ", np.std(y)) + + for i in range(7): + axs[ax].plot(range(0,101,10), y_total[i], color=color[i], label=legend[i]) + axs[ax].fill_between(range(0,101,10), y_low_total[i], y_high_total[i], color=color[i],alpha=0.3 ) + axs[ax].set_xticks(range(0,101,10)) + 
axs[ax].set_ylabel('prob', fontsize=26) # was 20 + axs[ax].set_title(legend[ax], color=color[ax], fontsize=26) + + + #axs[0].text(-0.15, 1.2, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 36) # was -0.125 20 + #axs[ax].text(-0.15, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 36, color=color[ax]) # -0.125 20 + axs[ax].tick_params(axis='y', which='major', labelsize=18) # was 16 + axs[ax].tick_params(axis='x', which='major', labelsize=18) # was 16 + for tick in axs[ax].xaxis.get_major_ticks(): + tick.set_pad(20) + + plt.xlabel('Percentage of occurred actions in one action sequence', fontsize= 26) # was 20 + handles, labels = axs[0].get_legend_handles_labels() + + plt.xlim([0, 101]) + plt.ylim([0, 1.1]) + plt.tight_layout() + plt.savefig("figure/"+"N"+ str(N) + "_ "+ model_type + "_bs_" + str(batch_size) + '_lr_' + str(lr) + '_hidden_size_' + str(hidden_size) + '_N' + str(N) + "_act_series" + str(act) + "_rate_ful_all_individuall_chiw.png", bbox_inches='tight') + + #plt.show() + + + + + + diff --git a/keyboard_and_mouse/stan/strategy_inference_model b/keyboard_and_mouse/stan/strategy_inference_model new file mode 100755 index 0000000..6b7f2e6 Binary files /dev/null and b/keyboard_and_mouse/stan/strategy_inference_model differ diff --git a/keyboard_and_mouse/stan/strategy_inference_model.stan b/keyboard_and_mouse/stan/strategy_inference_model.stan new file mode 100755 index 0000000..1d337e0 --- /dev/null +++ b/keyboard_and_mouse/stan/strategy_inference_model.stan @@ -0,0 +1,26 @@ +data { + int I; // number of question options (22) + int N; // number of questions being asked by the user + int K; // number of strategies + // observed "true" questions of the user + int q[N]; + // array of predicted probabilities of questions given strategies + // coming from the forward neural network + matrix[I, K] P_q_S[N]; +} +parameters { + // 
library(tidyverse)
library(cmdstanr)
library(dplyr)

# Infer each user's intention distribution from the network's predicted
# action probabilities: one action sequence per intention is selected once
# (from user 0) and reused for all users; one Stan fit is run per true
# strategy and all summaries are written to one CSV per user.
#
# Improvement over the original: the seven copy-pasted fit_0..fit_6 blocks
# are collapsed into a single loop that produces the same fits, the same
# printed summaries, and the same rbind order in the saved CSV.

model_type <- "lstmlast"
batch_size <- "8"
lr <- "0.0001"
hidden_size <- "128"
model_type <- paste0(model_type, "_bs_", batch_size, "_lr_", lr, "_hidden_size_", hidden_size)
print(model_type)
set.seed(9736734)

user_num <- 5
user <- c(0:(user_num - 1))
strategies <- c(0:6) # 7 tasks
print(strategies)
print(length(strategies))
N <- 1

# read data from csv
sel <- vector("list", length(strategies))
for (u in seq_along(user)) {
  dat <- vector("list", length(strategies))
  print(paste0('user: ', u))
  for (i in seq_along(strategies)) {
    dat[[i]] <- read.csv(paste0("../prediction/task", strategies[[i]], "/", model_type,
                                "/user", user[[u]], "_pred", ".csv"))
    dat[[i]]$assumed_strategy <- strategies[[i]]
    dat[[i]]$index <- dat[[i]]$action_id # sample based on intention
    dat[[i]]$id <- dat[[i]][, 1]         # sample based on intention
  }

  # reset N after inference
  N <- 1

  # select one action series per intention, once, from the first user
  if (user[[u]] == 0) {
    sel[[1]] <- dat[[1]] %>%
      group_by(task_name) %>%
      sample_n(N)
    sel[[1]] <- data.frame(sel[[1]])
  }

  # keep only the selected action series, N series per intention
  for (i in seq_along(strategies)) {
    dat[[i]] <- subset(dat[[i]], dat[[i]]$action_id == sel[[1]]$action_id[1])
  }
  row.names(dat) <- NULL

  # create save path
  dir.create(file.path("result"), showWarnings = FALSE)
  dir.create(file.path(paste0("result/", "N", N)), showWarnings = FALSE)
  save_path <- paste0("result/", "N", N, "/", model_type, "_N", N, "_", "result",
                      "_user", user[[u]], ".csv")

  dat <- do.call(rbind, dat) %>%
    mutate(index = as.numeric(as.factor(id))) %>%
    rename(true_strategy = task_name) %>%
    mutate(
      true_strategy = factor(
        true_strategy, levels = 0:6,
        labels = strategies
      ),
      # NOTE(review): later case_when branches overlap earlier ones and are
      # therefore partly unreachable; kept as in the original since q_type
      # is not used below.
      q_type = case_when(
        gt %in% c(3,4,5) ~ 0,
        gt %in% c(1,2,3,4,5,6,7) ~ 1,
        gt %in% c(1,2,3,4) ~ 2,
        gt %in% c(1,4,5,6,7) ~ 3,
        gt %in% c(1,2,3,6,7) ~ 4,
        gt %in% c(2,3,4,5,6,7) ~ 5,
        gt %in% c(1,2,3,4,5,6,7) ~ 6,
      )
    )

  # NOTE(review): `i` is the last value of the loop above (as in the
  # original); any assumed_strategy subset has identical gt/true_strategy
  # columns, so the choice does not change q or true_strategy.
  dat_obs <- dat %>% filter(assumed_strategy == strategies[[i]])
  N <- nrow(dat_obs)
  print(c("N: ", N))
  q <- dat_obs$gt
  true_strategy <- dat_obs$true_strategy

  K <- length(unique(dat$assumed_strategy))
  print(c("K: ", K))
  I <- 7

  # P_q_S[n, , ]: I x K matrix of predicted action probabilities for
  # observation n, normalized per assumed strategy column.
  P_q_S <- array(dim = c(N, I, K))
  for (n in 1:N) {
    P_q_S[n, , ] <- dat %>%
      filter(index == n) %>%
      select(matches("^act[[:digit:]]+$")) %>%
      as.matrix() %>%
      t()
    for (k in 1:K) {
      # normalize probabilities
      P_q_S[n, , k] <- P_q_S[n, , k] / sum(P_q_S[n, , k])
    }
  }
  print(c('dim P_q_S', dim(P_q_S)))

  mod <- cmdstan_model("strategy_inference_model.stan")

  # One fit per true strategy (replaces seven duplicated blocks).
  fits <- vector("list", length(strategies))
  for (s in seq_along(strategies)) {
    sub <- which(true_strategy == strategies[[s]])
    print(c('sub', sub))
    sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ])
    fits[[s]] <- mod$sample(data = sdata, refresh = 0,
                            output_dir = paste0(getwd(), "/temp"))
    print(fits[[s]]$summary(NULL, c("mean", "sd")))
  }

  # save csv (same row order as the original fit_0..fit_6 rbind)
  df <- do.call(rbind, lapply(fits, function(f) f$summary()))
  write.csv(df, file = save_path, quote = FALSE)
}
"_hidden_size_", hidden_size) +rates <- c("_0", "_10", "_20", "_30", "_40", "_50", "_60", "_70", "_80", "_90", "_100") + +user_num <- 5 +user <-c(0:(user_num-1)) +strategies <- c(0:6) # 7 tasks +print('strategies') +print(strategies) +print('strategies length') +print(length(strategies)) +N <- 1 +unique_act_id <- c(1:5) +print('unique_act_id') +print(unique_act_id) +set.seed(9746234) + +for (act_id in seq_along(unique_act_id)){ + for (u in seq_along(user)){ + print('user') + print(u) + for (rate in rates) { + N <- 1 + dat <- vector("list", length(strategies)) + for (i in seq_along(strategies)) { + if (rate=="_0"){ + # read data from csv + dat[[i]] <- read.csv(paste0("../prediction/single_act/task", strategies[[i]], "/", model_type, "/user", user[[u]], "/rate_10", "_act_", unique_act_id[act_id], "_pred", ".csv")) + } else{ + dat[[i]] <- read.csv(paste0("../prediction/single_act/task", strategies[[i]], "/", model_type, "/user", user[[u]], "/rate", rate, "_act_", unique_act_id[act_id], "_pred", ".csv")) + } + # strategy assumed for prediction + dat[[i]]$assumed_strategy <- strategies[[i]] + dat[[i]]$index <- dat[[i]]$action_id # sample based on intention + dat[[i]]$id <- dat[[i]][,1] # sample based on intention + } + + save_path <- paste0("result/", "N", N, "/", model_type, "_N", N, "_", "result","_user", user[[u]], "_rate_", rate, "_act_", unique_act_id[act_id], ".csv") + + dat_act <- do.call(rbind, dat) %>% + mutate(index = as.numeric(as.factor(id))) %>% + rename(true_strategy = task_name) %>% + mutate( + true_strategy = factor( + true_strategy, levels = 0:6, + labels = strategies + ), + q_type = case_when( + gt %in% c(3,4,5) ~ 0, + gt %in% c(1,2,3,4,5,6,7) ~ 1, + gt %in% c(1,2,3,4) ~ 2, + gt %in% c(1,4,5,6,7) ~ 3, + gt %in% c(1,2,3,6,7) ~ 4, + gt %in% c(2,3,4,5,6,7) ~ 5, + gt %in% c(1,2,3,4,5,6,7) ~ 6, + ) + ) + + dat_obs <- dat_act %>% filter(assumed_strategy == strategies[[i]]) + N <- nrow(dat_obs) + print(c("N: ", N)) + print(c("dim dat_act: ", dim(dat_act))) + 
q <- dat_obs$gt + true_strategy <- dat_obs$true_strategy + + K <- length(unique(dat_act$assumed_strategy)) + I <- 7 + + P_q_S <- array(dim = c(N, I, K)) + for (n in 1:N) { + print(n) + P_q_S[n, , ] <- dat_act %>% + filter(index == n) %>% + select(matches("^act[[:digit:]]+$")) %>% + as.matrix() %>% + t() + for (k in 1:K) { + # normalize probabilities + P_q_S[n, , k] <- P_q_S[n, , k] / sum(P_q_S[n, , k]) + } + } + + print(c("dim(P_q_S)", dim(P_q_S))) + # read stan model + mod <- cmdstan_model(paste0(getwd(),"/strategy_inference_model.stan")) + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 0) # "0" + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_0 <- mod$sample(data = sdata, refresh=0, output_dir=paste0(getwd(),'/temp')) + print(fit_0$summary(NULL, c("mean","sd"))) + + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 1) + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_1 <- mod$sample(data = sdata, refresh=0, output_dir=paste0(getwd(),'/temp')) + print(fit_1$summary(NULL, c("mean","sd"))) + + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 2) + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_2 <- mod$sample(data = sdata, refresh=0, 
output_dir=paste0(getwd(),'/temp')) + print(fit_2$summary(NULL, c("mean","sd"))) + + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 3) + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_3 <- mod$sample(data = sdata, refresh=0, output_dir=paste0(getwd(),'/temp')) + print(fit_3$summary(NULL, c("mean","sd"))) + + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 4) + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_4 <- mod$sample(data = sdata, refresh=0, output_dir=paste0(getwd(),'/temp')) + print(fit_4$summary(NULL, c("mean","sd"))) + + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 5) + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_5 <- mod$sample(data = sdata, refresh=0, output_dir=paste0(getwd(),'/temp')) + print(fit_5$summary(NULL, c("mean","sd"))) + + + if (rate=="_0"){ + sub <- integer(0) + } else { + sub <- which(true_strategy == 6) + } + #print(sub) + #print(length(sub)) + if (length(sub) == 1){ + temp <- P_q_S[sub, , ] + dim(temp) <- c(1, dim(temp)) + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp) + } else{ + sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ]) + } + fit_6 <- 
library(tidyverse)
library(cmdstanr)
library(dplyr)

# Intention inference over increasing fractions (0%..100%) of the selected
# action sequence, for every user. rate "_0" fits with no observations
# (posterior == prior); "_100" uses the full-sequence predictions.
#
# Improvement over the original: the seven copy-pasted fit_0..fit_6 blocks
# (each with the same rate=="_0" and length(sub)==1 special cases) are
# collapsed into one loop producing the same fits, prints and CSV rows.

model_type <- "lstmlast"
batch_size <- "8"
lr <- "0.0001"
hidden_size <- "128"
model_type <- paste0(model_type, "_bs_", batch_size, "_lr_", lr, "_hidden_size_", hidden_size)
rates <- c("_0", "_10", "_20", "_30", "_40", "_50", "_60", "_70", "_80", "_90", "_100")

user_num <- 5
user <- c(0:(user_num - 1))
strategies <- c(0:6) # 7 tasks
print(strategies)
print(length(strategies))
N <- 1

set.seed(9736754)

# read data from csv
sel <- vector("list", length(strategies))
for (u in seq_along(user)) {
  print('user')
  print(u)
  for (rate in rates) {
    dat <- vector("list", length(strategies))
    for (i in seq_along(strategies)) {
      if (rate == "_0") {
        # no dedicated 0% file; reuse the 10% file (its rows are ignored below)
        dat[[i]] <- read.csv(paste0("../prediction/task", strategies[[i]], "/", model_type,
                                    "/user", user[[u]], "_rate_10", "_pred", ".csv"))
      } else if (rate == "_100") {
        dat[[i]] <- read.csv(paste0("../prediction/task", strategies[[i]], "/", model_type,
                                    "/user", user[[u]], "_pred", ".csv"))
      } else {
        dat[[i]] <- read.csv(paste0("../prediction/task", strategies[[i]], "/", model_type,
                                    "/user", user[[u]], "_rate", rate, "_pred", ".csv"))
      }
      # strategy assumed for prediction
      dat[[i]]$assumed_strategy <- strategies[[i]]
      dat[[i]]$index <- dat[[i]]$action_id
      dat[[i]]$id <- dat[[i]][, 1]
    }

    # reset N after inference
    N <- 1

    # select the action series once (at the first rate) and reuse it
    if (rate == "_0") {
      sel[[1]] <- dat[[1]] %>%
        group_by(task_name) %>%
        sample_n(N)
      sel[[1]] <- data.frame(sel[[1]])
      unique_act_id <- unique(sel[[1]]$action_id)
    }
    print(sel[[1]]$action_id)
    print(sel[[1]]$task_name)
    print(dat[[1]]$task_name)

    for (i in seq_along(strategies)) {
      dat[[i]] <- subset(dat[[i]], dat[[i]]$action_id == sel[[1]]$action_id[1])
    }
    row.names(dat) <- NULL
    print(c('action id', dat[[1]]$action_id))
    print(c('action id', dat[[2]]$action_id))
    print(c('action id', dat[[3]]$action_id))

    dir.create(file.path(paste0("result/", "N", N)), showWarnings = FALSE)
    save_path <- paste0("result/", "N", N, "/", model_type, "_N", N, "_", "result",
                        "_user", user[[u]], "_rate_", rate, ".csv")

    dat_act <- do.call(rbind, dat) %>%
      mutate(index = as.numeric(as.factor(id))) %>%
      rename(true_strategy = task_name) %>%
      mutate(
        true_strategy = factor(
          true_strategy, levels = 0:6,
          labels = strategies
        ),
        # NOTE(review): overlapping case_when branches kept as in the
        # original; q_type is not used below.
        q_type = case_when(
          gt %in% c(3,4,5) ~ 0,
          gt %in% c(1,2,3,4,5,6,7) ~ 1,
          gt %in% c(1,2,3,4) ~ 2,
          gt %in% c(1,4,5,6,7) ~ 3,
          gt %in% c(1,2,3,6,7) ~ 4,
          gt %in% c(2,3,4,5,6,7) ~ 5,
          gt %in% c(1,2,3,4,5,6,7) ~ 6,
        )
      )

    # NOTE(review): `i` is the last loop value, as in the original.
    dat_obs <- dat_act %>% filter(assumed_strategy == strategies[[i]])
    N <- nrow(dat_obs)
    print(c("N: ", N))
    print(c("dim dat_act: ", dim(dat_act)))

    q <- dat_obs$gt
    true_strategy <- dat_obs$true_strategy

    K <- length(unique(dat_act$assumed_strategy))
    I <- 7

    # P_q_S[n, , ]: I x K predicted action probabilities per observation,
    # normalized per assumed-strategy column.
    P_q_S <- array(dim = c(N, I, K))
    for (n in 1:N) {
      print(n)
      P_q_S[n, , ] <- dat_act %>%
        filter(index == n) %>%
        select(matches("^act[[:digit:]]+$")) %>%
        as.matrix() %>%
        t()
      for (k in 1:K) {
        # normalize probabilities
        P_q_S[n, , k] <- P_q_S[n, , k] / sum(P_q_S[n, , k])
      }
    }
    print(c("dim(P_q_S)", dim(P_q_S)))

    mod <- cmdstan_model(paste0(getwd(), "/strategy_inference_model.stan"))

    # One fit per true strategy (replaces seven duplicated blocks).
    fits <- vector("list", length(strategies))
    for (s in seq_along(strategies)) {
      if (rate == "_0") {
        sub <- integer(0) # 0% observed: fit the prior only
      } else {
        sub <- which(true_strategy == strategies[[s]])
      }
      if (length(sub) == 1) {
        # single-row indexing drops the first array dimension; restore it
        temp <- P_q_S[sub, , ]
        dim(temp) <- c(1, dim(temp))
        sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = temp)
      } else {
        sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ])
      }
      fits[[s]] <- mod$sample(data = sdata, refresh = 0,
                              output_dir = paste0(getwd(), "/temp"))
      print(fits[[s]]$summary(NULL, c("mean", "sd")))
    }

    # save csv (same row order as the original fit_0..fit_6 rbind)
    df <- do.call(rbind, lapply(fits, function(f) f$summary()))
    write.csv(df, file = save_path, quote = FALSE)
  }
}
0000000..e2afa84 --- /dev/null +++ b/keyboard_and_mouse/test.py @@ -0,0 +1,158 @@ +import pickle +import numpy as np +from torch.utils.data import Dataset, DataLoader +import torch +import torch.nn as nn +import torch.optim as optim +import torch.nn.functional as F +import shutil +import matplotlib.pyplot as plt +import argparse +from networks import ActionDemo2Predicate +from pathlib import Path +from termcolor import colored +import pandas as pd + + +print('torch version: ',torch.__version__) +DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu") +print(DEVICE) +torch.manual_seed(256) + +class test_dataset(Dataset): + def __init__(self, x, label, action_id): + self.x = x + self.idx = action_id + self.labels = label + + def __getitem__(self, index): + x = self.x[index] + label = self.labels[index] + action_idx = self.idx[index] + return x, label, action_idx + + def __len__(self): + return len(self.labels) + +def test_model(model, test_dataloader, DEVICE): + model.to(DEVICE) + model.eval() + test_acc = [] + logits = [] + labels = [] + action_ids = [] + for iter, (x, label, action_id) in enumerate(test_dataloader): + with torch.no_grad(): + x = torch.tensor(x).to(DEVICE) + label = torch.tensor(label).to(DEVICE) + logps = model(x) + logps = F.softmax(logps, 1) + logits.append(logps.cpu().numpy()) + labels.append(label.cpu().numpy()) + action_ids.append(action_id) + + argmax_Y = torch.max(logps, 1)[1].view(-1, 1) + test_acc.append((label.float().view(-1, 1) == argmax_Y.float()).sum().item() / len(label.float().view(-1, 1)) * 100) + + test_acc = np.mean(np.array(test_acc)) + print('test acc {:.4f}'.format(test_acc)) + logits = np.concatenate(logits, axis=0) + labels = np.concatenate(labels, axis=0) + action_ids = np.concatenate(action_ids, axis=0) + return logits, labels, action_ids + +def main(): + # parsing parameters + parser = argparse.ArgumentParser(description='') + parser.add_argument('--resume', type=bool, default=False, help='resume training') 
+ parser.add_argument('--batch_size', type=int, default=32, help='batch size') + parser.add_argument('--lr', type=float, default=1e-1, help='learning rate') + parser.add_argument('--model_type', type=str, default='lstmlast', help='model type') + parser.add_argument('--hidden_size', type=int, default=256, help='hidden_size') + parser.add_argument('--epochs', type=int, default=100, help='training epoch') + parser.add_argument('--dataset_path', type=str, default='dataset/strategy_dataset/', help='dataset path') + parser.add_argument('--weight_decay', type=float, default=0.9, help='wight decay for Adam optimizer') + parser.add_argument('--demo_hidden', type=int, default=512, help='demo_hidden') + parser.add_argument('--dropout', type=float, default=0.5, help='dropout rate') + parser.add_argument('--checkpoint', type=str, default='checkpoints/', help='checkpoints path') + + args = parser.parse_args() + path = args.checkpoint+args.model_type+'_bs_'+str(args.batch_size)+'_lr_'+str(args.lr)+'_hidden_size_'+str(args.hidden_size) + + # read models + models = [] + for i in range(7): # 7 tasks + net = ActionDemo2Predicate(args) + model_path = path + '/task' + str(i) + '_checkpoint.ckpt' # _checkpoint + net.load(model_path) + models.append(net) + + for u in range(5): + task_pred = [] + task_target = [] + task_act = [] + task_task_name = [] + for i in range(7): # 7 tasks + test_loader = [] + # # read dataset test data + with open(args.dataset_path + 'test_data_' + str(i) + '.pkl', 'rb') as f: + data_x = pickle.load(f) + with open(args.dataset_path + 'test_label_' + str(i) + '.pkl', 'rb') as f: + data_y = pickle.load(f) + with open(args.dataset_path + 'test_action_id_' + str(i) + '.pkl', 'rb') as f: + act_idx = pickle.load(f) + + x = data_x[u] + y = data_y[u] + act = act_idx[u] + test_set = test_dataset(np.array(x), np.array(y)-1, np.array(act)) + test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=False, num_workers=4, drop_last=True) + + preds = [] + targets 
= [] + actions = [] + task_names = [] + for j in range(7): # logits from all models + pred, target, action = test_model(models[j], test_loader, DEVICE) + preds.append(pred) + targets.append(target) + actions.append(action) + task_names.append(np.full(target.shape, i)) #assumed intention + + task_pred.append(preds) + task_target.append(targets) + task_act.append(actions) + task_task_name.append(task_names) + + for i in range(7): + preds = [] + targets = [] + actions = [] + task_names = [] + for j in range(7): + preds.append(task_pred[j][i]) + targets.append(task_target[j][i]+1) # gt value add one + actions.append(task_act[j][i]) + task_names.append(task_task_name[j][i]) + + preds = np.concatenate(preds, axis=0) + targets = np.concatenate(targets, axis=0) + actions = np.concatenate(actions, axis=0) + task_names = np.concatenate(task_names, axis=0) + write_data = np.concatenate((np.reshape(actions, (-1, 1)), preds, np.reshape(task_names, (-1, 1)), np.reshape(targets, (-1, 1))), axis=1) + + output_path = 'prediction/' + 'task' +str(i) + '/' + args.model_type+'_bs_'+str(args.batch_size)+'_lr_'+str(args.lr)+'_hidden_size_'+str(args.hidden_size) + Path(output_path).mkdir(parents=True, exist_ok=True) + output_path = output_path + '/user' + str(u) + '_pred.csv' + print(write_data.shape) + + head = [] + for j in range(7): + head.append('act'+str(j+1)) + head.append('task_name') + head.append('gt') + head.insert(0,'action_id') + pd.DataFrame(write_data).to_csv(output_path, header=head) + +if __name__ == '__main__': + main() diff --git a/keyboard_and_mouse/test.sh b/keyboard_and_mouse/test.sh new file mode 100644 index 0000000..775c3e3 --- /dev/null +++ b/keyboard_and_mouse/test.sh @@ -0,0 +1,12 @@ +python3 test.py \ +--resume False \ +--batch_size 8 \ +--lr 1e-4 \ +--model_type lstmlast \ +--epochs 100 \ +--demo_hidden 128 \ +--hidden_size 128 \ +--dropout 0.5 \ +--dataset_path dataset/strategy_dataset/ \ +--checkpoint checkpoints/ \ +--weight_decay 1e-4 diff --git 
a/keyboard_and_mouse/train.py b/keyboard_and_mouse/train.py new file mode 100644 index 0000000..4cebbb8 --- /dev/null +++ b/keyboard_and_mouse/train.py @@ -0,0 +1,145 @@ +import pickle +import numpy as np +from torch.utils.data import Dataset, DataLoader +import torch +import torch.nn as nn +import torch.optim as optim +import shutil +import matplotlib.pyplot as plt +import argparse +from networks import ActionDemo2Predicate + +print('torch version: ',torch.__version__) +DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu") +print(DEVICE) +torch.manual_seed(256) + +class train_dataset(Dataset): + def __init__(self, x, label): + self.x = x + self.labels = label + + def __getitem__(self, index): + x = self.x[index] + label = self.labels[index] + return x, label #, img_idx + + def __len__(self): + return len(self.labels) + +class test_dataset(Dataset): + def __init__(self, x, label): + self.x = x + self.labels = label + + def __getitem__(self, index): + x = self.x[index] + label = self.labels[index] + return x, label #, img_idx + + def __len__(self): + return len(self.labels) + +def train_model(model, train_dataloader, criterion, optimizer, num_epochs, DEVICE, path, resume): + running_loss = 0 + train_losses = 10 + is_best_acc = False + is_best_train_loss = False + + best_train_acc = 0 + best_train_loss = 10 + + start_epoch = 0 + accuracy = 0 + + model.to(DEVICE) + model.train() + for epoch in range(start_epoch, num_epochs): + epoch_losses = [] + train_acc = [] + epoch_loss = 0 + for iter, (x, labels) in enumerate(train_dataloader): + x = torch.tensor(x).to(DEVICE) + labels = torch.tensor(labels).to(DEVICE) + optimizer.zero_grad() + logps = model(x) + loss = criterion(logps, labels) + loss.backward() + optimizer.step() + + epoch_loss += loss.detach().item() + argmax_Y = torch.max(logps, 1)[1].view(-1, 1) + train_acc.append((labels.float().view(-1, 1) == argmax_Y.float()).sum().item() / len(labels.float().view(-1, 1)) * 100) + epoch_loss /= (iter + 1) + 
epoch_losses.append(epoch_loss) + train_acc = np.mean(np.array(train_acc)) + print('Epoch {}, train loss {:.4f}, train acc {:.4f}'.format(epoch, epoch_loss, train_acc)) + + is_best_acc = train_acc > best_train_acc + best_train_acc = max(train_acc, best_train_acc) + + is_best_train_loss = epoch_loss < best_train_loss + best_train_loss = min(epoch_loss, best_train_loss) + + if is_best_acc: + model.save(path + '_model_best.ckpt') + model.save(path + '_checkpoint.ckpt') + #scheduler.step() + +def save_checkpoint(state, is_best, path, filename='_checkpoint.pth.tar'): + torch.save(state, path + filename) + if is_best: + shutil.copyfile(path + filename, path +'_model_best.pth.tar') + +def main(): + # parsing parameters + parser = argparse.ArgumentParser(description='') + parser.add_argument('--resume', type=bool, default=False, help='resume training') + parser.add_argument('--batch_size', type=int, default=32, help='batch size') + parser.add_argument('--lr', type=float, default=1e-1, help='learning rate') + parser.add_argument('--model_type', type=str, default='lstmlast', help='model type') + parser.add_argument('--hidden_size', type=int, default=256, help='hidden_size') + parser.add_argument('--epochs', type=int, default=100, help='training epoch') + parser.add_argument('--dataset_path', type=str, default='dataset/strategy_dataset/', help='dataset path') + parser.add_argument('--weight_decay', type=float, default=0.9, help='wight decay for Adam optimizer') + parser.add_argument('--demo_hidden', type=int, default=512, help='demo_hidden') + parser.add_argument('--dropout', type=float, default=0.5, help='dropout rate') + parser.add_argument('--checkpoint', type=str, default='checkpoints/', help='checkpoints path') + + args = parser.parse_args() + # create checkpoints path + from pathlib import Path + path = args.checkpoint+args.model_type+'_bs_'+str(args.batch_size)+'_lr_'+str(args.lr)+'_hidden_size_'+str(args.hidden_size) + Path(path).mkdir(parents=True, exist_ok=True) + 
print('total epochs for training: ', args.epochs) + + # read dataset + train_loader = [] + test_loader = [] + loss_funcs = [] + optimizers = [] + models = [] + parameters = [] + for i in range(7): # 7 tasks + # train data + with open(args.dataset_path + 'train_data_' + str(i) + '.pkl', 'rb') as f: + data_x = pickle.load(f) + with open(args.dataset_path + 'train_label_' + str(i) + '.pkl', 'rb') as f: + data_y = pickle.load(f) + train_set = train_dataset(np.array(data_x), np.array(data_y)-1) + train_loader.append(DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=4)) + print('task', str(i), 'train data size: ', len(train_set)) + + net = ActionDemo2Predicate(args) + models.append(net) + parameter = net.parameters() + loss_funcs.append(nn.CrossEntropyLoss()) + optimizers.append(optim.Adam(parameter, lr=args.lr, weight_decay=args.weight_decay)) + + for i in range(7): + path_save = path + '/task' + str(i) + print('checkpoint save path: ', path_save) + train_model(models[i], train_loader[i], loss_funcs[i], optimizers[i], args.epochs, DEVICE, path_save, args.resume) + +if __name__ == '__main__': + main() diff --git a/keyboard_and_mouse/train.sh b/keyboard_and_mouse/train.sh new file mode 100644 index 0000000..28a4104 --- /dev/null +++ b/keyboard_and_mouse/train.sh @@ -0,0 +1,12 @@ +python3 train.py \ +--resume False \ +--batch_size 8 \ +--lr 1e-4 \ +--model_type lstmlast \ +--epochs 100 \ +--demo_hidden 128 \ +--hidden_size 128 \ +--dropout 0.5 \ +--dataset_path dataset/strategy_dataset/ \ +--checkpoint checkpoints/ \ +--weight_decay 1e-4 diff --git a/watch_and_help/README.md b/watch_and_help/README.md new file mode 100644 index 0000000..06dac9e --- /dev/null +++ b/watch_and_help/README.md @@ -0,0 +1,46 @@ +# Watch And Help Dataset + +Codes to reproduce results on WAH dataset[^1] + +[^1]: Modified based on WAH train and test codes (https://github.com/xavierpuigf/watch_and_help)[https://github.com/xavierpuigf/watch_and_help]. 
+ +## Data + +Extract `dataset/watch_data.zip` + + +## Neural Network + +Run `sh scripts/train_watch_strategy_full.sh` to train the model + +To test model, either use trained model or extract checkpoints `checkpoints/train_strategy_full/lstmlast.zip` + +Run `sh scripts/test_watch_strategy_full.sh` to test the model + + + +## Prediction Split + +Create artificial users and sample predictions from 10% to 90% + +``` +cd stan +sh split_user.sh +sh sampler_user.sh +``` + + +## Bayesian Inference + + +Run inference to get results of user intention prediction and action length (0% to 100%) for all users + +``` +Rscript strategy_inference_test.R +``` + +Plot intention prediction results and 10% to 100% of actions results + +``` +sh plot_user_length.sh +sh plot_user_length_10_steps.sh diff --git a/watch_and_help/checkpoints/train_strategy_full/lstmlast.zip b/watch_and_help/checkpoints/train_strategy_full/lstmlast.zip new file mode 100644 index 0000000..e6dcd3c Binary files /dev/null and b/watch_and_help/checkpoints/train_strategy_full/lstmlast.zip differ diff --git a/watch_and_help/dataset/watch_data.zip b/watch_and_help/dataset/watch_data.zip new file mode 100644 index 0000000..ee08dda Binary files /dev/null and b/watch_and_help/dataset/watch_data.zip differ diff --git a/watch_and_help/scripts/test_watch_strategy_full.sh b/watch_and_help/scripts/test_watch_strategy_full.sh new file mode 100644 index 0000000..34be337 --- /dev/null +++ b/watch_and_help/scripts/test_watch_strategy_full.sh @@ -0,0 +1,13 @@ +python3 watch_strategy_full/predicate-train-strategy.py \ +--testset test_task \ +--gpu_id 0 \ +--batch_size 32 \ +--demo_hidden 512 \ +--model_type lstmlast \ +--dropout 0 \ +--inputtype actioninput \ +--inference 2 \ +--single 1 \ +--resume '' \ +--loss_type ce \ +--checkpoint checkpoints/train_strategy_full/lstmlast_cross_entropy_bs_32_iter_2000_train_task_prob diff --git a/watch_and_help/scripts/train_watch_strategy_full.sh 
b/watch_and_help/scripts/train_watch_strategy_full.sh new file mode 100644 index 0000000..500da22 --- /dev/null +++ b/watch_and_help/scripts/train_watch_strategy_full.sh @@ -0,0 +1,13 @@ +python3 watch_strategy_full/predicate-train-strategy.py \ +--gpu_id 0 \ +--model_lr_rate 3e-4 \ +--batch_size 32 \ +--demo_hidden 512 \ +--model_type lstmlast \ +--inputtype actioninput \ +--dropout 0 \ +--single 1 \ +--resume '' \ +--checkpoint checkpoints/train_strategy_full/lstmlast \ +--train_iters 2000 \ +--loss_type ce\ diff --git a/watch_and_help/stan/plot_user_length.py b/watch_and_help/stan/plot_user_length.py new file mode 100644 index 0000000..d0d8484 --- /dev/null +++ b/watch_and_help/stan/plot_user_length.py @@ -0,0 +1,132 @@ +import numpy as np +from numpy import genfromtxt +import matplotlib.pyplot as plt +import argparse +import pathlib + +def main(args): + if args.task_type == 'new_test_task': + user = 9 + N = 1 + if args.task_type == 'test_task': + user = 92 + N = 1 + rate = 100 + + widths = [-0.1, 0, 0.1] + user_table = [6, 13, 15, 19, 20, 23, 27, 30, 33, 44, 46, 49, 50, 51, 52, 53, 54, 56, 65, 71, 84] + + # read data + model_data_list = [] + user_list = [] + if not args.plot_user_list: + for i in range(user): + path = "result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/N"+ str(N) + "/" + args.model_type + "_N" + str(N) + "_result_" + str(rate) + "_user" + str(i) +".csv" + data = genfromtxt(path, delimiter=',', skip_header =1) + data = data[[1,2,3,5,6,7,9,10,11],:][:,[2,4,6,7]] + model_data_list.append(data) + if args.task_type == 'test_task': + user_list.append(np.transpose(data[:,[0]])) + else: + for i in range(user): + for t in user_table: + if t == i+1: + path = "result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/N"+ str(N) + "/" + args.model_type + "_N" + str(N) + "_result_" + str(rate) + "_user" + str(i) +".csv" + data = genfromtxt(path, delimiter=',', skip_header =1) + data = data[[1,2,3,5,6,7,9,10,11],:][:,[2,4,6,7]] + 
model_data_list.append(data) + user_list.append(np.transpose(data[:,[0]])) + + color = ['royalblue', 'lightgreen', 'tomato'] + legend = ['put fridge', 'put\n dishwasher', 'read book'] + fig, axs = plt.subplots(3, sharex=True, sharey=True) + fig.set_figheight(10) # all sample rate: 10; 3 row: 8 + fig.set_figwidth(20) + + for ax in range(3): + y_total = [] + y_low_total = [] + y_high_total = [] + for j in range(3): + y= [] + y_low = [] + y_high = [] + for i in range(len(model_data_list)): + y.append(model_data_list[i][j+ax*3][0]) + y_low.append(model_data_list[i][j+ax*3][2]) + y_high.append(model_data_list[i][j+ax*3][3]) + y_total.append(y) + y_low_total.append(y_low) + y_high_total.append(y_high) + print() + print("user mean of mean prob: ", np.mean(y)) + print("user mean of sd prob: ", np.std(y)) + + for i in range(3): + if args.plot_type == 'line': + axs[ax].plot(range(user), y_total[i], color=color[i], label=legend[i]) + axs[ax].fill_between(range(user), y_low_total[i], y_high_total[i], color=color[i],alpha=0.3 ) + if args.plot_type == 'bar': + if args.task_type == 'new_test_task': + widths = [-0.25, 0, 0.25] + yerror = [np.array(y_total[i])-np.array(y_low_total[i]), np.array(y_high_total[i])-np.array(y_total[i])] + axs[0].text(-0.19, 0.9, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 36) + axs[ax].bar(np.arange(user)+widths[i],y_total[i], width=0.2, yerr=yerror, color=color[i], label=legend[i]) + axs[ax].tick_params(axis='x', which='both', pad=15, length=0) + plt.xticks(range(user), range(1,user+1)) + axs[ax].set_ylabel('prob', fontsize= 36) # was 22 + axs[ax].text(-0.19, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 36, color=color[ax]) + plt.xlabel('user', fontsize= 40) # was 22 + for k, x in enumerate(np.arange(user)+widths[i]): + y = y_total[i][k] + yerror[1][k] + axs[ax].annotate(f'{y_total[i][k]:.2f}', (x, y), 
textcoords='offset points', xytext=(-15, 3), fontsize=14) + + + if args.task_type == 'test_task': + if not args.plot_user_list: + axs[0].text(-0.19, 0.9, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 36) + axs[ax].errorbar(np.arange(user)+widths[i],y_total[i], yerr=[np.array(y_total[i])-np.array(y_low_total[i]), np.array(y_high_total[i])-np.array(y_total[i])],markerfacecolor=color[i], ecolor=color[i], markeredgecolor=color[i], label=legend[i],fmt='.k') + axs[ax].tick_params(axis='x', which='both', pad=15, length=0) + plt.xticks(range(user)[::5], range(1,user+1)[::5]) + axs[ax].set_ylabel('prob', fontsize= 36) + axs[ax].text(-0.19, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 36, color=color[ax]) + plt.xlabel('user', fontsize= 40) + else: + axs[0].text(-0.19, 0.9, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 36) + axs[ax].errorbar(np.arange(len(model_data_list))+widths[i],y_total[i], yerr=[np.array(y_total[i])-np.array(y_low_total[i]), np.array(y_high_total[i])-np.array(y_total[i])],markerfacecolor=color[i], ecolor=color[i], markeredgecolor=color[i], label=legend[i],fmt='.k') + axs[ax].tick_params(axis='x', which='both', pad=15, length=0) + plt.xticks(range(len(model_data_list)), user_table) + axs[ax].set_ylabel('prob', fontsize= 36) + #axs[ax].set_yticks(range(0.0,1.0, 0.25)) + axs[ax].text(-0.19, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 36, color=color[ax]) + plt.xlabel('user', fontsize= 40) + + axs[ax].tick_params(axis='both', which='major', labelsize=30) + + handles, labels = axs[0].get_legend_handles_labels() + + plt.ylim([0, 1.08]) + plt.tight_layout() + pathlib.Path("result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/figure/").mkdir(parents=True, exist_ok=True) + + if 
args.task_type == 'test_task': + if not args.plot_user_list: + plt.savefig("result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/figure/"+"N"+ str(N)+"_"+args.model_type+"_rate_"+str(rate)+"_"+args.plot_type+"_test_set_1.png", bbox_inches='tight') + else: + plt.savefig("result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/figure/"+"N"+ str(N)+"_"+args.model_type+"_rate_"+str(rate)+"_"+args.plot_type+"_test_set_1_user_analysis.png", bbox_inches='tight') + if args.task_type == 'new_test_task': + plt.savefig("result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/figure/"+"N"+ str(N)+"_"+args.model_type+"_rate_"+str(rate)+"_"+args.plot_type+"_test_set_2.png", bbox_inches='tight') + plt.show() + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='') + parser.add_argument('--loss_type', type=str, default='ce') + parser.add_argument('--model_type', type=str, default="lstmlast" ) + parser.add_argument('--plot_type', type=str, default='bar') # bar or line + parser.add_argument('--task_type', type=str, default='test_task') + parser.add_argument('--plot_user_list', action='store_true') # plot user_table or not + + args = parser.parse_args() + + main(args) + diff --git a/watch_and_help/stan/plot_user_length.sh b/watch_and_help/stan/plot_user_length.sh new file mode 100644 index 0000000..14a3812 --- /dev/null +++ b/watch_and_help/stan/plot_user_length.sh @@ -0,0 +1,5 @@ +python3 plot_user_length.py \ +--loss_type ce \ +--model_type lstmlast \ +--plot_type bar \ +--task_type test_task diff --git a/watch_and_help/stan/plot_user_length_10_steps.py b/watch_and_help/stan/plot_user_length_10_steps.py new file mode 100644 index 0000000..2070b77 --- /dev/null +++ b/watch_and_help/stan/plot_user_length_10_steps.py @@ -0,0 +1,88 @@ +import numpy as np +from numpy import genfromtxt +import matplotlib.pyplot as plt +import argparse +import pathlib + +def main(): + parser = argparse.ArgumentParser(description='') + 
parser.add_argument('--loss_type', type=str, default='ce') + parser.add_argument('--model_type', type=str, default="lstmlast" ) + parser.add_argument('--task_type', type=str, default='test_task') + + args = parser.parse_args() + + if args.task_type == 'new_test_task': + user = 9 + N = 1 + if args.task_type == 'test_task': + user = 92 + N = 1 + + #rate = range(0,101,10) + rate_user_data_list = [] + for r in range(0,101,10): # rate = range(0,101,10) + # read data + print(r) + model_data_list = [] + for i in range(user): + path = "result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/N"+ str(N) + "/" + args.model_type + "_N" + str(N) + "_result_" + str(r) + "_user" + str(i) +".csv" + data = genfromtxt(path, delimiter=',', skip_header =1) + data = data[[1,2,3,5,6,7,9,10,11],:][:,[2,4,6,7]] + model_data_list.append(data) + #print(type(data)) + model_data_list_total = np.stack(model_data_list) + mean_user_data = np.mean(model_data_list_total,axis=0) + rate_user_data_list.append(mean_user_data) + + color = ['royalblue', 'lightgreen', 'tomato'] + legend = ['put fridge', 'put\n dishwasher', 'read book'] + fig, axs = plt.subplots(3, sharex=True, sharey=True) + fig.set_figheight(10) # all sample rate: 10; 3 row: 8 + fig.set_figwidth(20) + axs[0].text(-0.145, 0.9, 'True Intention:', horizontalalignment='center', verticalalignment='center', transform=axs[0].transAxes, fontsize= 25) # all: -0.3,0.5 3rows: -0.5,0.5 + + for ax in range(3): + y_total = [] + y_low_total = [] + y_high_total = [] + for j in range(3): + y= [] + y_low = [] + y_high = [] + for i in range(len(rate_user_data_list)): + y.append(rate_user_data_list[i][j+ax*3][0]) + y_low.append(rate_user_data_list[i][j+ax*3][2]) + y_high.append(rate_user_data_list[i][j+ax*3][3]) + y_total.append(y) + y_low_total.append(y_low) + y_high_total.append(y_high) + print() + print("user mean of mean prob: ", np.mean(y)) + print("user mean of sd prob: ", np.std(y)) + + for i in range(3): + axs[ax].plot(range(0,101,10), 
y_total[i], color=color[i], label=legend[i]) + axs[ax].fill_between(range(0,101,10), y_low_total[i], y_high_total[i], color=color[i],alpha=0.3 ) + axs[ax].set_xticks(range(0,101,10)) + axs[ax].set_ylabel('probability', fontsize=22) + + axs[ax].text(-0.145, 0.5, legend[ax], horizontalalignment='center', verticalalignment='center', transform=axs[ax].transAxes, fontsize= 25, color=color[ax]) + axs[ax].tick_params(axis='both', which='major', labelsize=18) + + plt.xlabel('Percentage of observed actions in one action sequence', fontsize= 22) + handles, labels = axs[0].get_legend_handles_labels() + + plt.xlim([0, 101]) + plt.ylim([0, 1]) + pathlib.Path("result/"+args.task_type+"/user"+str(user)+"/"+args.loss_type+"/figure/").mkdir(parents=True, exist_ok=True) + if args.task_type == 'test_task': + plt.savefig("result/"+args.task_type+"/user"+str(user)+ "/"+args.loss_type+"/figure/N"+ str(N) + "_"+args.model_type+"_rate_full_test_set_1.png", bbox_inches='tight') + if args.task_type == 'new_test_task': + plt.savefig("result/"+args.task_type+"/user"+str(user)+ "/"+args.loss_type+"/figure/N"+ str(N) + "_"+args.model_type+"_rate_full_test_set_2.png", bbox_inches='tight') + + plt.show() + +if __name__ == '__main__': + main() + diff --git a/watch_and_help/stan/plot_user_length_10_steps.sh b/watch_and_help/stan/plot_user_length_10_steps.sh new file mode 100644 index 0000000..20167a4 --- /dev/null +++ b/watch_and_help/stan/plot_user_length_10_steps.sh @@ -0,0 +1,4 @@ +python3 plot_user_length_10_steps.py \ +--loss_type ce \ +--model_type lstmlast \ +--task_type test_task diff --git a/watch_and_help/stan/sampler_user.py b/watch_and_help/stan/sampler_user.py new file mode 100644 index 0000000..048e917 --- /dev/null +++ b/watch_and_help/stan/sampler_user.py @@ -0,0 +1,64 @@ +import numpy as np +from numpy import genfromtxt +import csv +import pandas +import argparse + +def sample_predciton(path, rate): + data = pandas.read_csv(path).values + task_list = [0, 1, 2] + start = 0 + stop = 
0 + num_unique = np.unique(data[:,1]) + #print('unique number', num_unique) + + samples = [] + for j in task_list: + for i in num_unique: + inx = np.where((data[:,1] == i) & (data[:,-2] == j)) + samples.append(data[inx]) + + for i in range(len(samples)): + n = int(len(samples[i])*(100-rate)/100) + samples[i] = samples[i][:-n] + + return np.vstack(samples) + +def main(): + parser = argparse.ArgumentParser(description='') + parser.add_argument('--LOSS', type=str, default='ce') + parser.add_argument('--MODEL_TYPE', type=str, default="lstmlast_cross_entropy_bs_32_iter_2000_train_task_prob" ) + parser.add_argument('--EPOCHS', type=int, default=50) + parser.add_argument('--TASK', type=str, default='test_task') + args = parser.parse_args() + + + + + task = ['put_fridge', 'put_dishwasher', 'read_book'] + sets = [args.TASK] + rate = [10, 20, 30, 40, 50, 60, 70, 80, 90] + + for i in task: + for j in rate: + for k in sets: + if k == 'test_task': + user_num = 92 + if k == 'new_test_task': + user_num = 9 + + for l in range(user_num): + pred_path = "prediction/" + k + "/" + "user" + str(user_num) + "/ce/" + i + "/" + "loss_weight_" + args.MODEL_TYPE + "_prediction_" + i + "_user" + str(l) + ".csv" + save_path = "prediction/" + k + "/" + "user" + str(user_num) + "/ce/" + i + "/" + "loss_weight_" + args.MODEL_TYPE + "_prediction_" + i + "_user" + str(l) + "_rate_" + str(j) + ".csv" + data = sample_predciton(pred_path, j) + + head = [] + for r in range(79): + head.append('act'+str(r+1)) + head.append('task_name') + head.append('gt') + head.insert(0,'action_id') + pandas.DataFrame(data[:,1:]).to_csv(save_path, header=head) + +if __name__ == '__main__': + main() diff --git a/watch_and_help/stan/sampler_user.sh b/watch_and_help/stan/sampler_user.sh new file mode 100644 index 0000000..71064bc --- /dev/null +++ b/watch_and_help/stan/sampler_user.sh @@ -0,0 +1,5 @@ +python3 sampler_user.py \ +--TASK test_task \ +--LOSS ce \ +--MODEL_TYPE lstmlast \ +--EPOCHS 50 diff --git 
a/watch_and_help/stan/save_act_series.R b/watch_and_help/stan/save_act_series.R new file mode 100644 index 0000000..f1dfe5a --- /dev/null +++ b/watch_and_help/stan/save_act_series.R @@ -0,0 +1,76 @@ +library(tidyverse) +library(cmdstanr) +library(dplyr) + +strategies <- c("put_fridge", "put_dishwasher", "read_book") +model_type <- "lstmlast_cross_entropy_bs_32_iter_2000_train_task_prob" +rate <- "_0" +task_type <- "new_test_task" # new_test_task test_task +loss_type <- "ce" +set.seed(9746234) +if (task_type=="test_task"){ + user_num <- 92 + user <-c(0:(user_num-1)) + N <- 1 +} +if (task_type=="new_test_task"){ + user_num <- 9 + user <-c(0:(user_num-1)) + N <- 1 +} +total_user_act1 <- vector("list", length(user_num)) +total_user_act2 <- vector("list", length(user_num)) + +sel <- vector("list", length(strategies)) +act_series <- vector("list", user_num) +for (u in seq_along(user)){ + print('user') + print(u) + dat <- vector("list", length(strategies)) + for (i in seq_along(strategies)) { + if (rate=="_0"){ + dat[[i]] <- read.csv(paste0("prediction/", task_type, "/user", user_num, "/", loss_type, "/", strategies[[i]], "/loss_weight_", model_type, "_prediction_", strategies[[i]], "_user", user[[u]], "_rate_", "90", ".csv")) + } else if (rate=="_100"){ + dat[[i]] <- read.csv(paste0("prediction/", task_type, "/user", user_num, "/", loss_type, "/", strategies[[i]], "/loss_weight_", model_type, "_prediction_", strategies[[i]], "_user", user[[u]], ".csv")) + } else{ + dat[[i]] <- read.csv(paste0("prediction/", task_type, "/user", user_num, "/", loss_type, "/", strategies[[i]], "/loss_weight_", model_type, "_prediction_", strategies[[i]], "_user", user[[u]], "_rate", rate, ".csv")) + } + dat[[i]]$assumed_strategy <- strategies[[i]] + dat[[i]]$index <- dat[[i]]$action_id # sample based on intention + dat[[i]]$id <- dat[[i]][,1] # sample based on intention + } + + N <- 1 + # select all action series and infer every one + sel[[1]]<-dat[[1]] %>% + group_by(task_name) %>% + 
def sample_user(data, num_users, split_inx):
    """Split prediction rows into ``num_users`` synthetic-user subsets.

    Args:
        data: 2-D array whose column 1 holds a sequence id and whose
            second-to-last column holds the task label (0/1/2).
        num_users: number of synthetic users to create.
        split_inx: two cut points; the first ``split_inx[0]`` unique ids are
            eligible for task 0, the first ``split_inx[1]`` for task 1 and
            all unique ids for task 2.

    Returns:
        List of ``num_users`` stacked arrays, one per synthetic user.
    """
    # Fixed seed: the split must be reproducible across runs.
    np.random.seed(seed=100)

    all_ids = np.unique(data[:, 1])
    # Candidate id pools, one per task label (0, 1, 2).
    id_pools = [all_ids[:split_inx[0]], all_ids[:split_inx[1]], all_ids]

    # Draw ids for every (pool, user) pair.  The draw order (all users of
    # pool 0, then pool 1, then pool 2) matches the original implementation
    # so the seeded RNG produces identical splits.
    draws = []
    for pool in id_pools:
        per_user = len(pool) // num_users
        draws.append([np.random.choice(pool, per_user, replace=False)
                      for _ in range(num_users)])

    user_data = []
    for u in range(num_users):
        chunks = []
        for task_label, picks in enumerate(draws):
            for raw_id in picks[u]:
                mask = np.where((data[:, 1] == int(raw_id)) & (data[:, -2] == task_label))
                chunks.append(data[mask])
        user_data.append(np.vstack(chunks))

    return user_data
// Infer the probability of each user strategy (intention) from neural-network
// question predictions via a categorical observation model.
data {
  int I;                  // number of question options (22)
  int N;                  // number of questions asked by the user
  int K;                  // number of strategies
  int q[N];               // observed "true" question index at each step
  // Predicted P(question | strategy) per step, from the forward neural network.
  matrix[I, K] P_q_S[N];
}
parameters {
  // Probability vector of the strategies applied by the user — the quantity
  // this model infers.
  simplex[K] P_S;
}
model {
  // Flat Dirichlet prior over strategies.
  target += dirichlet_lpdf(P_S | rep_vector(1.0, K));
  for (n in 1:N) {
    // Marginal probability vector of the question asked at step n.
    vector[I] theta = P_q_S[n] * P_S;
    // Categorical likelihood (target += keeps normalizing constants).
    target += categorical_lpmf(q[n] | theta);
  }
}
# Strategy (intention) inference over neural-network action predictions.
# For every user and every observation rate, fit the categorical Stan model
# three times -- once per true intention -- and store the posterior summaries.
library(tidyverse)
library(cmdstanr)
library(dplyr)

# index order of the strategies assumed throughout
strategies <- c("put_fridge", "put_dishwasher", "read_book")
model_type <- "lstmlast"
rates <- c("_0", "_10", "_20", "_30", "_40", "_50", "_60", "_70", "_80", "_90", "_100")
task_type <- "test_task" # new_test_task test_task
loss_type <- "ce"
set.seed(9746234)
if (task_type == "test_task") {
  user_num <- 92
  # NOTE(review): starts at user 38 (looks like a resume after a partial run)
  # -- set back to 0 to process every user.
  user <- c(38:(user_num - 1))
  N <- 1
}
if (task_type == "new_test_task") {
  user_num <- 9
  user <- c(0:(user_num - 1))
  N <- 1
}

# Path of one per-user prediction csv for a given rate suffix.
# Rate "_0" only needs the file structure (no rows are observed), so the 10%
# file is reused; "_100" is the full-length file without a rate suffix.
prediction_path <- function(strategy, u, rate) {
  base <- paste0("prediction/", task_type, "/user", user_num, "/", loss_type,
                 "/", strategy, "/loss_weight_", model_type, "_prediction_",
                 strategy, "_user", user[[u]])
  if (rate == "_0") {
    paste0(base, "_rate_", "10", ".csv")
  } else if (rate == "_100") {
    paste0(base, ".csv")
  } else {
    paste0(base, "_rate", rate, ".csv")
  }
}

sel <- vector("list", length(strategies))
act_series <- vector("list", user_num)
for (u in seq_along(user)) {
  for (rate in rates) {
    # One data frame per assumed strategy; same actions, strategy-conditional
    # predicted probabilities.
    dat <- vector("list", length(strategies))
    for (i in seq_along(strategies)) {
      dat[[i]] <- read.csv(prediction_path(strategies[[i]], u, rate))
      dat[[i]]$assumed_strategy <- strategies[[i]]
      dat[[i]]$index <- dat[[i]]$action_id # sample based on intention
      dat[[i]]$id <- dat[[i]][, 1]         # sample based on intention
    }

    # reset N after inference
    N <- 1

    # At rate 0 pick one random action series per intention; the same
    # selection is reused for all later rates of this user.
    if (rate == "_0") {
      sel[[1]] <- dat[[1]] %>%
        group_by(task_name) %>%
        sample_n(N)
      sel[[1]] <- data.frame(sel[[1]])
      act_series[[u]] <- sel[[1]]$action_id
    }

    print(c('unique action id', sel[[1]]$action_id))

    # Keep only the selected action series (N series per intention).
    picked <- sel[[1]]$action_id[1:3]
    for (s in seq_along(dat)) {
      dat[[s]] <- subset(dat[[s]], dat[[s]]$action_id %in% picked)
    }
    print(c('task name 1', dat[[1]]$task_name))
    print(c('task name 2', dat[[2]]$task_name))
    print(c('task name 3', dat[[3]]$task_name))
    print(c('action id 1', dat[[1]]$action_id))
    print(c('action id 2', dat[[2]]$action_id))
    print(c('action id 3', dat[[3]]$action_id))

    # create save path
    dir.create(file.path(paste0("result/", task_type, "/user", user_num, "/", loss_type, "/N", N)), showWarnings = FALSE, recursive = TRUE)
    dir.create(file.path("temp"), showWarnings = FALSE)
    save_path <- paste0("result/", task_type, "/user", user_num, "/", loss_type, "/N", N, "/", model_type, "_N", N, "_", "result", rate, "_user", user[[u]], ".csv")

    # Stack the three assumed-strategy copies and attach the ground-truth
    # intention plus the per-dataset question type of each gt action.
    if (task_type == "test_task") {
      dat <- do.call(rbind, dat) %>%
        mutate(index = as.numeric(as.factor(id))) %>%
        rename(true_strategy = task_name) %>%
        mutate(
          true_strategy = factor(
            true_strategy, levels = 0:2,
            labels = strategies
          ),
          q_type = case_when(
            gt %in% c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19, 20, 22, 23, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 42, 43, 44, 58, 59, 64, 65, 68, 69, 70, 71, 72, 73, 74) ~ "put_fridge",
            gt %in% c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 25, 29, 30, 31, 32, 33, 34, 37, 38, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57) ~ "put_dishwasher",
            gt %in% c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45) ~ "read_book",
          )
        )
    }

    if (task_type == "new_test_task") {
      dat <- do.call(rbind, dat) %>%
        mutate(index = as.numeric(as.factor(id))) %>%
        rename(true_strategy = task_name) %>%
        mutate(
          true_strategy = factor(
            true_strategy, levels = 0:2,
            labels = strategies
          ),
          q_type = case_when(
            # new_test_set
            gt %in% c(1, 5, 6, 7, 8, 9, 10, 12, 13, 14, 15, 16, 19, 20, 22, 23, 25, 29, 30, 31, 32, 33, 34, 35, 40, 42, 43, 44, 46, 47, 52, 53, 55, 56, 58, 59, 60, 64, 65, 68, 69, 70, 71, 72, 73, 74, 75, 77, 78) ~ "put_fridge",
            gt %in% c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 21, 22, 23, 24, 25, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74) ~ "put_dishwasher",
            gt %in% c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 60, 75, 76, 77, 78) ~ "read_book",
          )
        )
    }

    # Rows observed by the model: every action appears once per assumed
    # strategy, so take the copy belonging to a single assumed strategy.
    # FIX: the original filtered on the leftover loop variable `i`, which only
    # worked because the loop above had finished with i == length(strategies).
    dat_obs <- dat %>% filter(assumed_strategy == strategies[[length(strategies)]])
    N <- nrow(dat_obs)
    print(c("N: ", N))
    q <- dat_obs$gt
    true_strategy <- dat_obs$true_strategy

    K <- length(unique(dat$assumed_strategy))
    I <- 79 # number of possible actions

    # P_q_S[n, , k] = predicted action distribution at step n under strategy k.
    P_q_S <- array(dim = c(N, I, K))
    for (n in 1:N) {
      P_q_S[n, , ] <- dat %>%
        filter(index == n) %>%
        select(matches("^act[[:digit:]]+$")) %>%
        as.matrix() %>%
        t()
      for (k in 1:K) {
        # normalize probabilities
        P_q_S[n, , k] <- P_q_S[n, , k] / sum(P_q_S[n, , k])
      }
    }

    mod <- cmdstan_model(paste0(getwd(), "/strategy_inference_model.stan"))

    # Fit the model restricted to rows whose true intention is `label`;
    # rate "_0" fits the prior alone (no observations).
    fit_for <- function(label) {
      if (rate == "_0") {
        sub <- integer(0)
      } else {
        sub <- which(true_strategy == label)
      }
      # NOTE(review): when length(sub) == 1, `P_q_S[sub, , ]` drops the first
      # dimension -- confirm N per intention is never 1 or add drop = FALSE.
      sdata <- list(N = length(sub), K = K, I = I, q = q[sub], P_q_S = P_q_S[sub, , ])
      fit <- mod$sample(data = sdata, refresh = 0, output_dir = paste0(getwd(), "/temp"))
      print(fit$summary(NULL, c("mean", "sd")))
      fit
    }

    fit_put_fridge <- fit_for("put_fridge")
    fit_put_dishwasher <- fit_for("put_dishwasher")
    fit_read_book <- fit_for("read_book") # should favor index 3

    # save csv: posterior summaries of the three fits, stacked
    df <- rbind(fit_put_fridge$summary(), fit_put_dishwasher$summary(), fit_read_book$summary())
    write.csv(df, file = save_path, quote = FALSE)
  }
}
b/watch_and_help/watch_strategy_full/helper.py @@ -0,0 +1,203 @@ +import os +import math +import numpy as np + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.nn.parameter import Parameter +from torch.nn.modules.rnn import RNNCellBase + +def to_cpu(list_of_tensor): + if isinstance(list_of_tensor[0], list): + list_list_of_tensor = list_of_tensor + list_of_tensor = [to_cpu(list_of_tensor) + for list_of_tensor in list_list_of_tensor] + else: + list_of_tensor = [tensor.cpu() for tensor in list_of_tensor] + return list_of_tensor + + +def average_over_list(l): + return sum(l) / len(l) + +def _LayerNormGRUCell(input, hidden, w_ih, w_hh, ln, b_ih=None, b_hh=None): + gi = F.linear(input, w_ih, b_ih) + gh = F.linear(hidden, w_hh, b_hh) + i_r, i_i, i_n = gi.chunk(3, 1) + h_r, h_i, h_n = gh.chunk(3, 1) + + # use layernorm here + resetgate = torch.sigmoid(ln['resetgate'](i_r + h_r)) + inputgate = torch.sigmoid(ln['inputgate'](i_i + h_i)) + newgate = torch.tanh(ln['newgate'](i_n + resetgate * h_n)) + hy = newgate + inputgate * (hidden - newgate) + return hy + +class CombinedEmbedding(nn.Module): + def __init__(self, pretrained_embedding, embedding): + super(CombinedEmbedding, self).__init__() + self.pretrained_embedding = pretrained_embedding + self.embedding = embedding + self.pivot = pretrained_embedding.num_embeddings + + def forward(self, input): + outputs = [] + mask = input < self.pivot + outputs.append(self.pretrained_embedding(torch.clamp(input, 0, self.pivot-1)) * mask.unsqueeze(1).float()) + mask = input >= self.pivot + outputs.append(self.embedding(torch.clamp(input, self.pivot) - self.pivot) * mask.unsqueeze(1).float()) + return sum(outputs) + + +class writer_helper(object): + def __init__(self, writer): + self.writer = writer + self.all_steps = {} + + def get_step(self, tag): + if tag not in self.all_steps.keys(): + self.all_steps.update({tag: 0}) + + step = self.all_steps[tag] + self.all_steps[tag] += 1 + return step + + def 
class LinearStep():
    """Linearly anneal a value from ``max`` down to ``min`` over ``steps`` updates.

    The current value is exposed as ``self.v``; call ``update()`` once per
    step to advance the schedule.  After ``steps`` updates the value stays
    clamped at ``min``.
    """

    def __init__(self, max, min, steps):
        # Parameter names kept for caller compatibility (they shadow the
        # builtins only inside this constructor).
        self.steps = float(steps)
        self.max = max
        self.min = min
        self.cur_step = 0
        self.v = self.max

    def update(self):
        """Advance one step and recompute ``self.v``."""
        progress = self.cur_step / self.steps
        annealed = self.max - (self.max - self.min) * progress
        self.cur_step += 1
        # Clamp from below at the configured minimum.
        self.v = annealed if annealed > self.min else self.min
def LCS(X, Y):
    """Return one longest common subsequence of sequences X and Y, as a list.

    Classic O(m*n) dynamic program.  All longest subsequences are collected
    and the first one found is returned; when X and Y share no element the
    empty list is returned.
    """
    rows, cols = len(X), len(Y)
    # length[i][j]  = LCS length of X[:i] and Y[:j]
    # best_seq[i][j] = one concrete LCS of X[:i] and Y[:j]
    length = [[0] * (cols + 1) for _ in range(rows + 1)]
    best_seq = [[[] for _ in range(cols + 1)] for _ in range(rows + 1)]
    best_len = 0
    candidates = []

    for i in range(1, rows + 1):
        for j in range(1, cols + 1):
            if X[i - 1] == Y[j - 1]:
                # Extend the diagonal subsequence by the matched element.
                length[i][j] = length[i - 1][j - 1] + 1
                best_seq[i][j] = best_seq[i - 1][j - 1] + [X[i - 1]]
                if length[i][j] > best_len:
                    best_len = length[i][j]
                    candidates = [best_seq[i][j]]
                elif length[i][j] == best_len and best_len != 0:
                    candidates.append(best_seq[i][j])
            elif length[i - 1][j] > length[i][j - 1]:
                length[i][j] = length[i - 1][j]
                best_seq[i][j] = best_seq[i - 1][j]
            else:
                length[i][j] = length[i][j - 1]
                best_seq[i][j] = best_seq[i][j - 1]

    return candidates[0] if candidates else candidates
+from termcolor import colored + +import torch +import torch.nn as nn +import torch.nn.functional as F +#import ipdb +#import pdb + +def _align_tensor_index(reference_index, tensor_index): + + where_in_tensor = [] + for i in reference_index: + where = np.where(i == tensor_index)[0][0] + where_in_tensor.append(where) + return np.array(where_in_tensor) + + +def _sort_by_length(list_of_tensor, batch_length, return_idx=False): + idx = np.argsort(np.array(copy.copy(batch_length)))[::-1] + for i, tensor in enumerate(list_of_tensor): + if isinstance(tensor, dict): + list_of_tensor[i]['class_objects'] = [tensor['class_objects'][j] for j in idx] + list_of_tensor[i]['object_coords'] = [tensor['object_coords'][j] for j in idx] + list_of_tensor[i]['states_objects'] = [tensor['states_objects'][j] for j in idx] + list_of_tensor[i]['mask_object'] = [tensor['mask_object'][j] for j in idx] + else: + list_of_tensor[i] = [tensor[j] for j in idx] + if return_idx: + return list_of_tensor, idx + else: + return list_of_tensor + +def _sort_by_index(list_of_tensor, idx): + for i, tensor in enumerate(list_of_tensor): + list_of_tensor[i] = [tensor[j] for j in idx] + return list_of_tensor + +class ActionDemo2Predicate(nn.Module): + summary_keys = ['loss', 'top1'] + def __init__(self, args, dset, loss_weight, **kwargs): + from network.module_graph import PredicateClassifier + super(ActionDemo2Predicate, self).__init__() + + print('------------------------------------------------------------------------------------------') + print('ActionDemo2Predicate') + print('------------------------------------------------------------------------------------------') + + model_type = kwargs["model_type"] + print('model_type', model_type) + + if model_type.lower() == 'max': + from network.module_graph import ActionDemoEncoder + demo_encoder = ActionDemoEncoder(args, dset, 'max') + elif model_type.lower() == 'avg': + from network.module_graph import ActionDemoEncoder + demo_encoder = ActionDemoEncoder(args, 
dset, 'avg') + elif model_type.lower() == 'lstmavg': + from network.module_graph import ActionDemoEncoder + demo_encoder = ActionDemoEncoder(args, dset, 'lstmavg') + elif model_type.lower() == 'bilstmavg': + from network.module_graph import ActionDemoEncoder + demo_encoder = ActionDemoEncoder(args, dset, 'bilstmavg') + elif model_type.lower() == 'lstmlast': + from network.module_graph import ActionDemoEncoder + demo_encoder = ActionDemoEncoder(args, dset, 'lstmlast') + elif model_type.lower() == 'bilstmlast': + from network.module_graph import ActionDemoEncoder + demo_encoder = ActionDemoEncoder(args, dset, 'bilstmlast') + else: + raise ValueError + demo_encoder = torch.nn.DataParallel(demo_encoder) + + predicate_decoder = PredicateClassifier(args, dset, loss_weight) + + # for quick save and load + all_modules = nn.Sequential() + all_modules.add_module('demo_encoder', demo_encoder) + all_modules.add_module('predicate_decoder', predicate_decoder) + + self.demo_encoder = demo_encoder + self.predicate_decoder = predicate_decoder + self.all_modules = all_modules + self.to_cuda_fn = None + + def set_to_cuda_fn(self, to_cuda_fn): + self.to_cuda_fn = to_cuda_fn + + def forward(self, data, **kwargs): + if self.to_cuda_fn: + data = self.to_cuda_fn(data) + + # demonstration + batch_data = data[0].cuda() + batch_gt = data[1].cuda() + batch_task_name = data[2] + batch_action_id = data[3] + + # demonstration encoder + batch_demo_emb, _ = self.demo_encoder(batch_data, batch_gt, batch_task_name) + + loss, info = self.predicate_decoder(batch_demo_emb, batch_gt, batch_action_id, batch_task_name) + + return loss, info + + def write_summary(self, writer, info, postfix): + + model_name = 'Demo2Predicate-{}/'.format(postfix) + for k in self.summary_keys: + if k in info.keys(): + writer.scalar_summary(model_name + k, info[k]) + + def save(self, path, verbose=False): + + if verbose: + print(colored('[*] Save model at {}'.format(path), 'magenta')) + 
torch.save(self.all_modules.state_dict(), path) + + def load(self, path, verbose=False): + + if verbose: + print(colored('[*] Load model at {}'.format(path), 'magenta')) + self.all_modules.load_state_dict( + torch.load( + path, + map_location=lambda storage, + loc: storage)) + + + diff --git a/watch_and_help/watch_strategy_full/network/module_graph.py b/watch_and_help/watch_strategy_full/network/module_graph.py new file mode 100644 index 0000000..ded1d09 --- /dev/null +++ b/watch_and_help/watch_strategy_full/network/module_graph.py @@ -0,0 +1,249 @@ +import random +import itertools +import numpy as np + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.nn.utils.rnn import pad_sequence +from helper import fc_block, Constant + +def _calculate_accuracy_predicate(logits, batch_target, max_possible_count=None, topk=1, multi_classifier=False): + batch_size = batch_target.size(0) / max_possible_count + + _, pred = logits.topk(topk, 1, True, True) + pred = pred.t() + correct = pred.eq(batch_target.view(1, -1).expand_as(pred)) + + k = 1 + accuray = correct[:k].view(-1).float() + accuray = accuray.view(-1, max_possible_count) + + correct_k = (accuray.sum(1)==max_possible_count).sum(0) + correct_k = correct_k * (100.0 / batch_size) + + return correct_k + +def _calculate_accuracy( + action_correct, + object_correct, + rel_correct, + target_correct, + batch_length, + info): + + action_valid_correct = [sum(action_correct[i, :(l - 1)]) + for i, l in enumerate(batch_length)] + object_valid_correct = [sum(object_correct[i, :(l - 1)]) + for i, l in enumerate(batch_length)] + rel_valid_correct = [sum(rel_correct[i, :(l - 1)]) + for i, l in enumerate(batch_length)] + target_valid_correct = [sum(target_correct[i, :(l - 1)]) + for i, l in enumerate(batch_length)] + + action_accuracy = sum(action_valid_correct).float() / (sum(batch_length) - 1. * len(batch_length)) + object_accuracy = sum(object_valid_correct).float() / (sum(batch_length) - 1. 
* len(batch_length)) + rel_accuracy = sum(rel_valid_correct).float() / (sum(batch_length) - 1. * len(batch_length)) + target_accuracy = sum(target_valid_correct).float() / (sum(batch_length) - 1. * len(batch_length)) + + info.update({'action_accuracy': action_accuracy.cpu().item()}) + info.update({'object_accuracy': object_accuracy.cpu().item()}) + info.update({'rel_accuracy': rel_accuracy.cpu().item()}) + info.update({'target_accuracy': target_accuracy.cpu().item()}) + +class PredicateClassifier(nn.Module): + + def __init__( + self, + args, + dset, + loss_weight + ): + + super(PredicateClassifier, self).__init__() + + self.num_goal_predicates = dset.num_goal_predicates + self.max_possible_count = dset.max_goal_length + self.loss = custom_loss(loss_weight) + + hidden_size = args.demo_hidden + self.hidden_size = hidden_size + self.loss_type = args.loss_type + + if args.dropout==0: + print('dropout', args.dropout) + classifier = nn.Sequential() + classifier.add_module('fc_block1', fc_block(hidden_size*82, hidden_size, False, nn.Tanh)) + classifier.add_module('fc_block2', fc_block(hidden_size, 79, False, None)) # 79 is all possible actions + else: + print('dropout not 0', args.dropout) + classifier = nn.Sequential() + classifier.add_module('fc_block1', fc_block(hidden_size*82, hidden_size, False, nn.Tanh)) + classifier.add_module('dropout', nn.Dropout(args.dropout)) + classifier.add_module('fc_block2', fc_block(hidden_size, 79, False, None)) # 79 is all possible actions + + self.classifier = classifier + + def forward(self, input_emb, batch_target, batch_action_id, batch_task_name, **kwargs): + input_emb = input_emb.view(-1, self.hidden_size*82) + logits = self.classifier(input_emb) + + prob = F.softmax(logits, 1) + + #cross entropy loss + if self.loss_type == 'ce': + loss = F.cross_entropy(logits, batch_target) + #custom loss + if self.loss_type == 'regu': + loss = self.loss(logits, batch_target) + + argmax_Y = torch.max(logits, 1)[1].view(-1, 1) + top1 = 
class custom_loss(nn.Module):
    """Cross entropy plus a batch label-distribution regularizer.

    The regularizer is the MSE between the batch's empirical label
    distribution and a fixed reference distribution (``loss_weight``),
    scaled by ``DIST_WEIGHT``.  Requires CUDA, matching the original.
    """

    # Weight of the distribution-matching term relative to cross entropy
    # (was the magic constant 1000 inline).
    DIST_WEIGHT = 1000
    # Size of the action vocabulary.
    NUM_ACTIONS = 79

    def __init__(self, loss_weight) -> None:
        super(custom_loss, self).__init__()
        self.loss = nn.CrossEntropyLoss().cuda()
        self.weight_loss = nn.MSELoss().cuda()
        self.loss_weight = loss_weight
        # Reference (expected) label distribution over all actions.
        self.counts = torch.FloatTensor(self.loss_weight).cuda()

    def forward(self, pred, target):
        """Return combined loss for ``pred`` logits against ``target`` labels."""
        # Empirical label distribution of this batch, padded to the full
        # vocabulary so it is comparable with self.counts.
        batch_counts = torch.bincount(target)
        batch_counts = batch_counts / torch.sum(batch_counts)
        if len(batch_counts) < self.NUM_ACTIONS:
            # (simplified from `79 - len % 79`, identical for len < 79)
            batch_counts = F.pad(input=batch_counts,
                                 pad=(0, self.NUM_ACTIONS - len(batch_counts)),
                                 mode='constant', value=0)

        celoss = self.loss(pred, target)
        distloss = self.weight_loss(batch_counts, self.counts)
        # FIX: removed the unconditional debug print that ran on every
        # training step; the 1:1000 weighting is unchanged.
        return celoss + self.DIST_WEIGHT * distloss
= nn.Sequential() + classifier.add_module('fc_block1', fc_block(hidden_size, hidden_size, False, nn.Tanh)) + classifier.add_module('dropout', nn.Dropout(args.dropout)) + classifier.add_module('fc_block2', fc_block(hidden_size, self.num_goal_predicates*(self.max_subgoal_length+1), False, None)) + + + self.classifier = classifier + + def forward(self, bs, input_emb, batch_target, batch_file_name, **kwargs): + + logits = self.classifier(input_emb) + logits = logits.reshape([-1, (self.max_subgoal_length+1)]) + prob = F.softmax(logits, 1) + + batch_target = torch.cat(batch_target) + loss = F.cross_entropy(logits, batch_target) + + top1 = _calculate_accuracy_predicate(logits, batch_target, self.num_goal_predicates, multi_classifier=True) + + with torch.no_grad(): + info = { + "prob": prob.cpu().numpy(), + "loss": loss.cpu().numpy(), + "top1": top1.cpu().numpy(), + "target": batch_target.cpu().numpy(), + "file_name": batch_file_name + } + + return loss, info + +class ActionDemoEncoder(nn.Module): + def __init__(self, args, dset, pooling): + super(ActionDemoEncoder, self).__init__() + hidden_size = args.demo_hidden + self.hidden_size = hidden_size + + len_action_predicates = dset.max_action_len + self.action_embed = nn.Embedding(len_action_predicates, hidden_size) + + feat2hidden = nn.Sequential() + feat2hidden.add_module( + 'fc_block1', fc_block(hidden_size, hidden_size, False, nn.ReLU)) + self.feat2hidden = feat2hidden + + self.pooling = pooling + + if 'lstm' in self.pooling: + self.lstm = nn.LSTM(hidden_size, hidden_size) + + + def forward(self, batch_data, batch_gt, batch_task_name): + batch_data = batch_data.view(-1,1) + + stacked_demo_feat = self.action_embed(batch_data) + + stacked_demo_feat = self.feat2hidden(stacked_demo_feat) + batch_demo_feat = [] + start = 0 + + for length in range(0,batch_data.shape[0]): + if length == 0: + feat = stacked_demo_feat[0:1, :] + else: + feat = stacked_demo_feat[(length-1):length, :] + if len(feat.size()) == 3: + feat = 
feat.unsqueeze(0) + + if self.pooling == 'max': + feat = torch.max(feat, 0)[0] + elif self.pooling == 'avg': + feat = torch.mean(feat, 0) + elif self.pooling == 'lstmavg': + lstm_out, hidden = self.lstm(feat.view(len(feat), 1, -1)) + lstm_out = lstm_out.view(len(feat), -1) + feat = torch.mean(lstm_out, 0) + elif self.pooling == 'lstmlast': + lstm_out, hidden = self.lstm(feat.view(len(feat), 1, -1)) + lstm_out = lstm_out.view(len(feat), -1) + feat = lstm_out[-1] + else: + raise ValueError + + + batch_demo_feat.append(feat) + + demo_emb = torch.stack(batch_demo_feat, 0) + demo_emb = demo_emb.view(8,82, -1) + return demo_emb, batch_demo_feat + diff --git a/watch_and_help/watch_strategy_full/predicate-train-strategy.py b/watch_and_help/watch_strategy_full/predicate-train-strategy.py new file mode 100644 index 0000000..550d901 --- /dev/null +++ b/watch_and_help/watch_strategy_full/predicate-train-strategy.py @@ -0,0 +1,368 @@ +import resource +import time +from termcolor import colored +import torch +from torch.utils.data import DataLoader +from helper import Constant, LinearStep +from predicate.utils import save, setup, save_checkpoint +from predicate.utils import summary, write_prob, summary_eval, write_prob_strategy +import random +import json +import pickle +import numpy as np + + +topk = 1 +p_th = 0.5 + + +def print_output(args, outputs, targets, file_names, test_dset): + goal_predicates = test_dset.goal_predicates + goal_predicates = {v:k for k,v in goal_predicates.items()} + json_output = {} + + for i, target in enumerate(targets): + if args.inference == 0: + p = random.uniform(0, 1) + if p>p_th: + continue + + file_name = file_names[i] + output = outputs[i] + + if args.multi_classifier: + output = torch.Tensor(output).view(-1, len(test_dset.goal_predicates), test_dset.max_subgoal_length+1) + target = torch.Tensor(target).view(-1, len(test_dset.goal_predicates)) + else: + output = torch.Tensor(output).view(-1, test_dset.max_goal_length, 
len(test_dset.goal_predicates)) + target = torch.Tensor(target).view(-1, test_dset.max_goal_length) + + output = output.numpy() + target = target.numpy() + + if args.inference == 0: + target_inference = [target[0]] + output_inference = [output[0]] + file_name_inference = [file_name[0]] + else: + target_inference = target + output_inference = output + file_name_inference = file_name + + + for (target_j, output_j, file_name_j) in zip(target_inference, output_inference, file_name_inference): + ## only show the fist sample in each minibatch + assert file_name_j not in json_output + json_output[file_name_j] = {} + json_output[file_name_j]['ground_truth'] = [] + json_output[file_name_j]['prediction'] = [] + json_output[file_name_j]['ground_truth_id'] = [] + json_output[file_name_j]['prediction_id'] = [] + + print('----------------------------------------------------------------------------------') + if args.multi_classifier: + assert len(target_j) == len(goal_predicates) == len(output_j) + for k, target_k in enumerate(target_j): + output_k = output_j[k] + strtar = ('tar: %s %d' % (goal_predicates[k], target_k)).ljust(50, ' ') + strpre = '| gen: %s %d' % (goal_predicates[k], output_k.argmax()) + print(strtar+strpre) + + json_output[file_name_j]['ground_truth_id'].append(int(target_k)) + json_output[file_name_j]['prediction_id'].append(output_k.argmax()) + json_output[file_name_j]['ground_truth'].append(goal_predicates[k]) + json_output[file_name_j]['prediction'].append(goal_predicates[k]) + else: + for k, target_k in enumerate(target_j): + output_k = output_j[k] + + strtar = ('tar: %s' % goal_predicates[int(target_k)]).ljust(50, ' ') + strpre = '| gen: %s' % goal_predicates[output_k.argmax()] + print(strtar+strpre) + + json_output[file_name_j]['ground_truth_id'].append(int(target_k)) + json_output[file_name_j]['prediction_id'].append(output_k.argmax()) + json_output[file_name_j]['ground_truth'].append(goal_predicates[int(target_k)]) + 
json_output[file_name_j]['prediction'].append(goal_predicates[output_k.argmax()]) + + print('----------------------------------------------------------------------------------') + + if args.inference == 1: + if args.single: + pickle.dump( json_output, open( "dataset/test_output_"+args.resume.split('/')[-2]+"_single_task.p", "wb" ) ) + else: + pickle.dump( json_output, open( "dataset/test_output_"+args.resume.split('/')[-2]+"_multiple_task.p", "wb" ) ) + +def run_one_iteration(model, optim, batch_data, train_args, args): + model.train() + optim.zero_grad() + loss, info = model(batch_data, **train_args) + loss.backward() + optim.step() + return batch_data, info, loss + + +def train( + args, + model, + optim, + train_loader, + test_loader, + val_loader, + checkpoint_dir, + writer, + train_dset, + test_dset, + task): + # Train + print(colored('Start training...', 'red')) + # loader for the testing set + def _loader(): + while True: + for batch_data in test_loader: + yield batch_data + + get_next_data_fn = _loader().__iter__().__next__ + train_args = {} + + if args.inference == 1: + info = summary( + args, + writer, + None, + None, + model, + test_loader, + 'test') + print('test top1', info['top1']) + write_prob(info, args) + + def _train_loop(task): + iter = 0 + summary_t1 = time.time() + + test_best_top1 = 0 + print('start while') + print('train iterations: ',args.train_iters) + while iter <= args.train_iters: + for batch_data in train_loader: + results = run_one_iteration(model, optim, batch_data, train_args, args) + batch_data, info, loss = results + + if iter % 10 == 0: + print('%s: training %d / %d: loss %.4f: acc %.4f' % (args.checkpoint, iter, len(train_loader), loss, info['top1'])) + + fps = 10. 
/ (time.time() - summary_t1) + info = summary( + args, + writer, + info, + train_args, + model, + None, + 'train', + fps=fps) + if iter > 0: + summary_t1 = time.time() + + if iter % (len(train_loader)*1) == 0 and iter>0: + info = summary( + args, + writer, + None, + None, + model, + test_loader, + 'test') + + if info['top1']>test_best_top1: + test_best_top1 = info['top1'] + save(args, iter, checkpoint_dir, model, task) + save_checkpoint(args, iter, checkpoint_dir, model, task) + + iter += 1 + print('start train loop') + _train_loop(task) + print('train loop done') + + +def main(): + args, checkpoint_dir, writer, model_config = setup(train=True) + print(args) + + from predicate.demo_dataset_graph_strategy_test import get_dataset + from predicate.demo_dataset_graph_strategy_test import collate_fn + from predicate.demo_dataset_graph_strategy_test import to_cuda_fn + + #strategy inference + if args.inference == 2: # 0: not infer, 1: infer, 2: strategy infer + from network.encoder_decoder import ActionDemo2Predicate + test_tasks = ['put_fridge', 'put_dishwasher', 'read_book', 'prepare_food', 'setup_table'] + new_test_tasks = ['put_fridge', 'put_dishwasher', 'read_book'] + + train_dsets = [] + test_dsets = [] + new_test_dsets = [] + models = [] + train_loaders = [] + test_loaders = [] + val_loaders = [] + for i in range(len(new_test_tasks)): + loss_weights = np.load('dataset/watch_data/loss_weight_'+test_tasks[i]+'_new_test_task'+'.npy') + train_dset, test_dset, new_test_dset = get_dataset(args, new_test_tasks[i], train=True ) + train_dsets.append(train_dset) + test_dsets.append(test_dset) + new_test_dsets.append(new_test_dset) + model = ActionDemo2Predicate(args, train_dset, loss_weights, **model_config) + model.load(args.checkpoint+'/demo2predicate-checkpoint_model_'+new_test_tasks[i]+'.ckpt', True) + model.cuda() + model.eval() + models.append(model) + + train_loader = DataLoader( + dataset=train_dset, + batch_size=args.batch_size, + shuffle=True, + 
num_workers=args.n_workers, + drop_last=True) + if args.testset == 'test_task': + test_loader = DataLoader( + dataset=test_dset, + batch_size=args.batch_size, + shuffle=False, + num_workers=0, + drop_last=True) + + val_loader = DataLoader( + dataset=test_dset, + batch_size=args.batch_size, + shuffle=False, + num_workers=0, + drop_last=True) + if args.testset == 'new_test_task': + test_loader = DataLoader( + dataset=new_test_dset, + batch_size=args.batch_size, + shuffle=False, + num_workers=0, + drop_last=True) + + val_loader = DataLoader( + dataset=new_test_dset, + batch_size=args.batch_size, + shuffle=False, + num_workers=0, + drop_last=True) + + train_loaders.append(train_loader) + test_loaders.append(test_loader) + val_loaders.append(val_loader) + + + for i in range(len(models)): + infos = [] + for j in range(len(test_loaders)): + info = summary_eval( + models[i], + test_loaders[j], + test_loaders[j].dataset) + print('test top1', info['top1']) + infos.append(info) + total_info = { + "prob": np.concatenate((infos[0]["prob"], infos[1]["prob"], infos[2]["prob"]), axis=0), + "target": np.concatenate((infos[0]["target"], infos[1]["target"], infos[2]["target"]), axis=0), #batch_target.cpu().numpy(), + "task_name": np.concatenate((infos[0]["task_name"], infos[1]["task_name"], infos[2]["task_name"]), axis=0), #batch_task_name, + "action_id": np.concatenate((infos[0]["action_id"], infos[1]["action_id"], infos[2]["action_id"]), axis=0) #batch_action_id.cpu().numpy() + } + write_prob_strategy(total_info, test_tasks[i], args) + + else: + print('get dataset') + test_tasks = ['put_dishwasher', 'read_book', 'put_fridge', 'prepare_food', 'setup_table'] + new_test_tasks = ['put_dishwasher', 'read_book', 'put_fridge'] + for i in range(len(new_test_tasks)): + train_dset, test_dset, new_test_dset = get_dataset(args, test_tasks[i], train=True ) + print('train set len:',len(train_dset)) + + train_loader = DataLoader( + dataset=train_dset, + batch_size=args.batch_size, + shuffle=True, 
+ num_workers=args.n_workers, + drop_last=True) + + if args.single: + test_loader = DataLoader( + dataset=new_test_dset, + batch_size=args.batch_size, + shuffle=True, + num_workers=0, + drop_last=True) + + val_loader = DataLoader( + dataset=test_dset, + batch_size=args.batch_size, + shuffle=True, + num_workers=0, + drop_last=True) + else: + test_loader = DataLoader( + dataset=new_test_dset, + batch_size=args.batch_size, + shuffle=True, + num_workers=0, + drop_last=True) + + val_loader = DataLoader( + dataset=test_dset, + batch_size=args.batch_size, + shuffle=True, + num_workers=0, + drop_last=True) + + # initialize model + loss_weights = np.load('dataset/watch_data/loss_weight_'+test_tasks[i]+'_train_task'+'.npy') + if args.inputtype=='graphinput': + from network.encoder_decoder import GraphDemo2Predicate + model = GraphDemo2Predicate(args, train_dset, **model_config) + elif args.inputtype=='actioninput': + from network.encoder_decoder import ActionDemo2Predicate + model = ActionDemo2Predicate(args, train_dset, loss_weights, **model_config) + + if args.resume!='': + model.load(args.resume, True) + + + optim = torch.optim.Adam( + filter( + lambda p: p.requires_grad, + model.parameters()), + args.model_lr_rate) + if args.gpu_id is not None: + model.cuda() + + # main loop + train( + args, + model, + optim, + train_loader, + test_loader, + val_loader, + checkpoint_dir, + writer, + train_dset, + test_dset, + test_tasks[i]) + +rlimit = resource.getrlimit(resource.RLIMIT_NOFILE) +resource.setrlimit(resource.RLIMIT_NOFILE, (1024 * 4, rlimit[1])) + + +if __name__ == '__main__': + from multiprocessing import set_start_method + try: + set_start_method('spawn') + except RuntimeError: + pass + main() diff --git a/watch_and_help/watch_strategy_full/predicate/__pycache__/demo_dataset_graph_strategy_test.cpython-38.pyc b/watch_and_help/watch_strategy_full/predicate/__pycache__/demo_dataset_graph_strategy_test.cpython-38.pyc new file mode 100644 index 0000000..2737e7a Binary files 
/dev/null and b/watch_and_help/watch_strategy_full/predicate/__pycache__/demo_dataset_graph_strategy_test.cpython-38.pyc differ diff --git a/watch_and_help/watch_strategy_full/predicate/__pycache__/utils.cpython-38.pyc b/watch_and_help/watch_strategy_full/predicate/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..35acf07 Binary files /dev/null and b/watch_and_help/watch_strategy_full/predicate/__pycache__/utils.cpython-38.pyc differ diff --git a/watch_and_help/watch_strategy_full/predicate/demo_dataset_graph_strategy_test.py b/watch_and_help/watch_strategy_full/predicate/demo_dataset_graph_strategy_test.py new file mode 100644 index 0000000..1d6b5fc --- /dev/null +++ b/watch_and_help/watch_strategy_full/predicate/demo_dataset_graph_strategy_test.py @@ -0,0 +1,306 @@ +import os +import random +import copy +import json + + +import numpy as np +from termcolor import colored +from glob import glob +import pickle + + +import torch +import torch.nn.functional as F +from torch.utils.data.dataset import Dataset + + + +################################ +# Demonstration +################################ + +def get_dataset(args, task_name, train): + train_data, test_data, new_test_data, train_action_gt, test_action_gt, new_test_action_gt, train_task_name, test_task_name, new_test_task_name, train_action_id, test_action_id, new_test_action_id, action_predicates, all_action, all_object, goal_objects, goal_targets, goal_predicates, max_goal_length, max_action_length, max_node_length, max_subgoal_length = gather_data(args, task_name) + train_dset = demo_dset(args, train_data, train_action_gt, train_task_name, train_action_id, action_predicates, all_action, all_object, goal_objects, goal_targets, goal_predicates, max_goal_length, max_action_length, max_node_length, max_subgoal_length) + test_dset = demo_dset(args, test_data, test_action_gt, test_task_name, test_action_id, action_predicates, all_action, all_object, goal_objects, goal_targets, goal_predicates, 
max_goal_length, max_action_length, max_node_length, max_subgoal_length) + new_test_dset = demo_dset(args, new_test_data, new_test_action_gt, new_test_task_name, new_test_action_id, action_predicates, all_action, all_object, goal_objects, goal_targets, goal_predicates, max_goal_length, max_action_length, max_node_length, max_subgoal_length) + return train_dset, test_dset, new_test_dset + + +def collate_fn(data_list): + graph_data = [data[0] for data in data_list] + batch_goal_index = [data[1] for data in data_list] + batch_valid_action_with_walk_index = [data[2] for data in data_list] + + if len(graph_data[0])==3: + batch_graph_length = [d[0] for d in graph_data] + batch_graph_input = [d[1] for d in graph_data] + batch_file_name = [d[2] for d in graph_data] + else: + batch_graph_length = [d[0] for d in graph_data] + batch_file_name = [d[1] for d in graph_data] + + if len(graph_data[0])==3: + batch_demo_data = ( + np.arange(len(batch_graph_length)), + batch_graph_length, + batch_graph_input, + batch_file_name + ) + else: + batch_demo_data = ( + np.arange(len(batch_graph_length)), + batch_graph_length, + batch_file_name + ) + + return batch_demo_data, batch_goal_index, batch_valid_action_with_walk_index + + +def to_cuda_fn(data): + batch_demo_data, batch_goal_index, batch_valid_action_with_walk_index = data + + if len(batch_demo_data)==4: + batch_demo_index, batch_graph_length, batch_graph_input, batch_file_name = batch_demo_data + + batch_graph_input_class_objects = [[torch.tensor(j['class_objects']).cuda() for j in i] for i in batch_graph_input] + batch_graph_input_object_coords = [[torch.tensor(j['object_coords']).cuda() for j in i] for i in batch_graph_input] + batch_graph_input_states_objects = [[torch.tensor(j['states_objects']).cuda() for j in i] for i in batch_graph_input] + batch_graph_input_mask_object = [[torch.tensor(j['mask_object']).cuda() for j in i] for i in batch_graph_input] + + batch_graph_input = { 'class_objects': batch_graph_input_class_objects, 
+ 'object_coords': batch_graph_input_object_coords, + 'states_objects': batch_graph_input_states_objects, + 'mask_object': batch_graph_input_mask_object} + + else: + batch_demo_index, batch_graph_length, batch_file_name = batch_demo_data + + + batch_goal_index = [torch.tensor(i).cuda().long() for i in batch_goal_index] + batch_valid_action_with_walk_index = [torch.tensor(i).cuda().long() for i in batch_valid_action_with_walk_index] + + if len(batch_demo_data)==4: + batch_demo_data = ( + batch_demo_index, + batch_graph_length, + batch_graph_input, + batch_file_name + ) + else: + batch_demo_data = ( + batch_demo_index, + batch_graph_length, + batch_file_name + ) + + return batch_demo_data, batch_goal_index, batch_valid_action_with_walk_index + +def one_hot(states, graph_node_states): + one_hot = np.zeros(len(graph_node_states)) + for state in states: + one_hot[graph_node_states[state]] = 1 + return one_hot + +def gather_data(args, task): + meta_data_path = 'dataset/watch_data/metadata.json' + data_path_new_test = 'dataset/watch_data/action/new_test_task_' + task + '_strategy.json' + data_path_test = 'dataset/watch_data/action/test_task_' + task + '_strategy.json' + data_path_train = 'dataset/watch_data/action/train_task_' + task + '_strategy.json' + + + with open(data_path_new_test, 'r') as f: + new_test_data = json.load(f) + + with open(data_path_test, 'r') as f: + test_data = json.load(f) + + #if os.path.exists(data_path): + #print('load gather_data, this may take a while...', data_path) + with open(data_path_train, 'r') as f: + data = json.load(f) + + # temporarily set data_path to test data to test training + train_data = data + + with open(meta_data_path, 'r') as f: + data = json.load(f) + + action_predicates = data['action_predicates'] + all_action = data['all_action'] + all_object = data['all_object'] + + goal_objects = data['goal_objects'] + goal_targets = data['goal_targets'] + goal_predicates = data['goal_predicates'] + + graph_class_names = 
data['graph_class_names'] + graph_node_states = data['graph_node_states'] + + max_goal_length = data['max_goal_length'] + max_action_length = data['max_action_length'] + max_node_length = data['max_node_length'] + + + ## ----------------------------------------------------------------------------- + ## add action, goal, and graph node index + ## ----------------------------------------------------------------------------- + max_subgoal_length = 1 + + train_task_name = np.array(train_data['task_name']) + + test_task_name = np.array(test_data['task_name']) + + new_test_task_name = np.array(new_test_data['task_name']) + + + for traintest in [train_data, test_data, new_test_data]: + for data in traintest['goal']: + ## goal + goal_index = [] + subgoal_dict = {} + for subgoal in data: + goal_index.append(goal_predicates[subgoal]) + + if goal_predicates[subgoal] not in subgoal_dict: + subgoal_dict[goal_predicates[subgoal]] = 1 + else: + subgoal_dict[goal_predicates[subgoal]] += 1 + + this_max_subgoal_length = np.max(list(subgoal_dict.values())) + if this_max_subgoal_length>max_subgoal_length: + max_subgoal_length = this_max_subgoal_length + + + goal_index.sort() + if len(goal_index) < max_goal_length: + for i in range(max_goal_length-len(goal_index)): + goal_index.append(0) + + ## action gt + for i in range(len(traintest['action_gt'])): # len(traintest['action_gt']) + action_name = traintest['action_gt'][i][0].split(' ')[0] + object_name = traintest['action_gt'][i][0].split(' ')[1] + predicate_name = ' '.join([action_name, object_name]) + traintest['action_gt'][i] = action_predicates[predicate_name] + + ## action + valid_action_with_walk_index = [] + for i in range(len(traintest['valid_action_with_walks'])): + actions_index = [] + for actions in traintest['valid_action_with_walks'][i]: + if actions!='None': + action_name = actions[0].split(' ')[0] + object_name = actions[0].split(' ')[1] + predicate_name = ' '.join([action_name, object_name]) + else: + predicate_name = 
actions + actions_index.append(action_predicates[predicate_name]) + + traintest['valid_action_with_walks'][i] = actions_index + + print(len(train_data['action_gt']),np.array(train_data['action_gt']), type(train_data['action_gt'])) + + + train_action_gt = np.array(train_data['action_gt']) + test_action_gt = np.array(test_data['action_gt']) + new_test_action_gt = np.array(new_test_data['action_gt']) + + train_action_id = np.array(train_data['action_id']) + test_action_id = np.array(test_data['action_id']) + new_test_action_id = np.array(new_test_data['action_id']) + + train_data = np.array(train_data['valid_action_with_walks']) + test_data = np.array(test_data['valid_action_with_walks']) + new_test_data = np.array(new_test_data['valid_action_with_walks']) + + + print('--------------------------------------------------------------------------------') + print('train_data', len(train_data), train_data.shape) + print('test_data', len(test_data), train_data.shape) + print('new_test_data', len(new_test_data), train_data.shape) + print('--------------------------------------------------------------------------------') + print('train_gt', len(train_action_gt), train_action_gt.shape) + print('test_gt', len(test_action_gt), test_action_gt.shape) + print('new_test_gt', len(new_test_action_gt), new_test_action_gt.shape) + print('--------------------------------------------------------------------------------') + print('train_task_name', len(train_task_name), train_task_name.shape) + print('test_task_name', len(test_task_name), test_task_name.shape) + print('new_test_task_name', len(new_test_task_name), new_test_task_name.shape) + print('--------------------------------------------------------------------------------') + + return train_data, test_data, new_test_data, train_action_gt, test_action_gt, new_test_action_gt, train_task_name, test_task_name, new_test_task_name, train_action_id, test_action_id, new_test_action_id, action_predicates, all_action, all_object, goal_objects, 
goal_targets, goal_predicates, max_goal_length, max_action_length, max_node_length, max_subgoal_length + +class demo_dset(Dataset): + + def __init__( + self, + args, + data, + gt, + task_name, + action_id, + #action_predicates, all_action, all_object, goal_objects, goal_targets, goal_predicates, graph_class_names, graph_node_states, max_goal_length, max_action_length, max_node_length, max_subgoal_length): + action_predicates, all_action, all_object, goal_objects, goal_targets, goal_predicates, max_goal_length, max_action_length, max_node_length, max_subgoal_length): + + + self.inputtype = args.inputtype + self.multi_classifier = args.multi_classifier + self.data = data + self.gt = gt + self.task_name = task_name + self.action_id = action_id + + self.max_action_len = 82 + self.action_predicates = action_predicates + self.all_action = all_action + self.all_object = all_object + + self.goal_objects = goal_objects + self.goal_targets = goal_targets + self.goal_predicates = goal_predicates + self.num_goal_predicates = len(goal_predicates) + + self.max_goal_length = max_goal_length + self.max_action_length = max_action_length + self.max_subgoal_length = max_subgoal_length + + if self.inputtype=='graphinput': + self.graph_class_names = graph_class_names + self.graph_node_states = graph_node_states + self.num_node_states = len(graph_node_states) + self.max_node_length = max_node_length + + + print('-----------------------------------------------------------------------------') + print('num_goal_predicates', self.num_goal_predicates) + print('max_goal_length', self.max_goal_length) + print('max_action_length', max_action_length) + + if self.inputtype=='graphinput': + print('num_node_states', self.num_node_states) + print('max_node_length', max_node_length) + print('-----------------------------------------------------------------------------') + + + def __getitem__(self, index): + data = self.data[index] + gt = self.gt[index] + task_name = self.task_name[index] + action_id 
= self.action_id[index] + + return data, gt, task_name, action_id + + def __len__(self): + return len(self.data) + + def _preprocess_one_data(self, data): + action_gt = data['action_gt'] + valid_action_with_walk_index = data['valid_action_with_walks'] + action_length = len(valid_action_with_walk_index) + inputdata = (action_length, 'actions') + data = [inputdata, action_gt, valid_action_with_walk_index] + return data + + diff --git a/watch_and_help/watch_strategy_full/predicate/utils.py b/watch_and_help/watch_strategy_full/predicate/utils.py new file mode 100644 index 0000000..5cee74e --- /dev/null +++ b/watch_and_help/watch_strategy_full/predicate/utils.py @@ -0,0 +1,297 @@ +import argparse +import random +import time +import os +import json +import numpy as np + + +import torch +from torch.utils.tensorboard import SummaryWriter +from helper import to_cpu, average_over_list, writer_helper +import csv +import pathlib + +def grab_args(): + + def str2bool(v): + return v.lower() == 'true' + + parser = argparse.ArgumentParser(description='') + parser.add_argument('--seed', type=int, default=123, help='random seed') + parser.add_argument('--verbose', type=str2bool, default=False) + parser.add_argument('--debug', type=str2bool, default=False) + parser.add_argument('--prefix', type=str, default='test') + parser.add_argument('--checkpoint', type=str, default=None) + parser.add_argument('--n_workers', type=int, default=0) + parser.add_argument('--train_iters', type=int, default=2e4) + parser.add_argument('--inputtype', type=str, default='actioninput') + parser.add_argument('--resume', type=str, default='') + parser.add_argument('--dropout', type=float, default=0) + parser.add_argument('--inference', type=int, default=0) + parser.add_argument('--single', type=int, default=0) + parser.add_argument('--loss_type', type=str, default='regu') #regu or ce + parser.add_argument('--testset', type=str, default='test') # test: test set 1, new_test: test set 2 + + # model config + 
parser.add_argument( + '--model_type', + type=str, + default='max') + parser.add_argument('--embedding_dim', type=int, default=100) + parser.add_argument('--predicate_hidden', type=int, default=128) + parser.add_argument('--demo_hidden', type=int, default=128) + parser.add_argument('--multi_classifier', type=int, default=0) + parser.add_argument('--transformer_nhead', type=int, default=2) + + # train config + parser.add_argument( + '--gpu_id', + metavar='N', + type=str, + nargs='+', + help='specify the gpu id') + parser.add_argument('--batch_size', type=int, default=2) + parser.add_argument('--model_lr_rate', type=float, default=3e-4) + + args = parser.parse_args() + return args + + +def setup(train): + def _basic_setting(args): + + # set seed + torch.manual_seed(args.seed) + random.seed(args.seed) + np.random.seed(args.seed) + + if args.gpu_id is None: + os.environ['CUDA_VISIBLE_DEVICES'] = '' + args.__dict__.update({'cuda': False}) + else: + os.environ['CUDA_VISIBLE_DEVICES'] = ', '.join(args.gpu_id) + args.__dict__.update({'cuda': True}) + torch.cuda.manual_seed_all(args.seed) + + if args.debug: + args.verbose = True + + def _basic_checking(args): + pass + + def _create_checkpoint_dir(args): + # setup checkpoint_dir + if args.debug: + checkpoint_dir = 'debug' + elif train: + checkpoint_dir = 'checkpoint_dir' + else: + checkpoint_dir = 'testing_dir' + + checkpoint_dir = os.path.join(checkpoint_dir, 'demo2predicate') + + args_dict = args.__dict__ + keys = sorted(args_dict) + prefix = ['{}-{}'.format(k, args_dict[k]) for k in keys] + prefix.remove('debug-{}'.format(args.debug)) + prefix.remove('checkpoint-{}'.format(args.checkpoint)) + prefix.remove('gpu_id-{}'.format(args.gpu_id)) + + checkpoint_dir = os.path.join(checkpoint_dir, *prefix) + + checkpoint_dir += '/{}'.format(time.strftime("%Y%m%d-%H%M%S")) + + return checkpoint_dir + + def _make_dirs(checkpoint_dir, tfboard_dir): + if not os.path.exists(checkpoint_dir): + os.makedirs(checkpoint_dir) + if not 
os.path.exists(tfboard_dir): + os.makedirs(tfboard_dir) + + def _print_args(args): + args_str = '' + with open(os.path.join(checkpoint_dir, 'args.txt'), 'w') as f: + for k, v in args.__dict__.items(): + s = '{}: {}'.format(k, v) + args_str += '{}\n'.format(s) + print(s) + f.write(s + '\n') + print("All the data will be saved in", checkpoint_dir) + return args_str + + args = grab_args() + _basic_setting(args) + _basic_checking(args) + + checkpoint_dir = args.checkpoint + tfboard_dir = os.path.join(checkpoint_dir, 'tfboard') + + _make_dirs(checkpoint_dir, tfboard_dir) + args_str = _print_args(args) + + writer = SummaryWriter(tfboard_dir) + writer.add_text('args', args_str, 0) + writer = writer_helper(writer) + + model_config = { + "model_type": args.model_type, + "embedding_dim": args.embedding_dim, + "predicate_hidden": args.predicate_hidden, + } + model_config.update({"demo_hidden": args.demo_hidden}) + + return args, checkpoint_dir, writer, model_config + + +def summary( + args, + writer, + info, + train_args, + model, + test_loader, + postfix, + fps=None): + + if postfix == 'train': + model.write_summary(writer, info, postfix=postfix) + elif postfix == 'val': + info = summary_eval( + model, + test_loader, + test_loader.dataset) + model.write_summary(writer, info, postfix=postfix) + elif postfix == 'test': + info = summary_eval( + model, + test_loader, + test_loader.dataset) + + model.write_summary(writer, info, postfix=postfix) + else: + raise ValueError + + if fps: + writer.scalar_summary('General/fps', fps) + + return info + +def summary_eval( + model, + loader, + dset): + + model.eval() + print(len(loader)) + with torch.no_grad(): + + loss_list = [] + top1_list = [] + iter = 0 + action_id_list = [] + + prob = [] + target = [] + file_name = [] + for batch_data in loader: + loss, info = model(batch_data) + loss_list.append(loss.cpu().item()) + top1_list.append(info['top1']) + + prob.append(info['prob']) + target.append(info['target']) + 
file_name.append(info['task_name']) + action_id_list.append(info['action_id']) + + if iter%10==0: + print('testing %d / %d: loss %.4f: acc %.4f' % (iter, len(loader), loss, info['top1'])) + + iter += 1 + + info = {"loss": sum(loss_list)/ len(loss_list), "top1": sum(top1_list)/ len(top1_list), "prob": prob, "target": target, "task_name": file_name, "action_id": action_id_list} + return info + +def write_prob(info, args): + temp_prob_list = [] + temp_action_id_list = [] + temp_task_name_list = [] + temp_target_list = [] + for i in range(len(info['prob'])): + for j in range(len(info['prob'][i])): + temp_prob_list.append(info['prob'][i][j]) + + for i in range(len(info['action_id'])): + for j in range(len(info['action_id'][i])): + temp_action_id_list.append(info['action_id'][i][j]) + + for i in range(len(info['task_name'])): + for j in range(len(info['task_name'][i])): + temp_task_name_list.append(info['task_name'][i][j]) + + for i in range(len(info['target'])): + for j in range(len(info['target'][i])): + temp_target_list.append(info['target'][i][j]) + + prob = np.array(temp_prob_list) + action_id = np.array(temp_action_id_list) + task_name = np.array(temp_task_name_list) + target = np.array(temp_target_list) + + write_data = np.concatenate((np.reshape(action_id, (-1, 1)), prob, np.reshape(task_name, (-1, 1)), np.reshape(target, (-1, 1))), axis=1) + import pandas as pd + head = [] + for j in range(79): + head.append('act'+str(j+1)) + head.append('task_name') + head.append('gt') + head.insert(0,'action_id') + pd.DataFrame(write_data).to_csv("prediction/" + args.model_type + "/" + task_name[0] + "_full.csv", header=head) + +def write_prob_strategy(info, model_name, args): + temp_prob_list = [] + temp_action_id_list = [] + temp_task_name_list = [] + temp_target_list = [] + for i in range(len(info['prob'])): + for j in range(len(info['prob'][i])): + temp_prob_list.append(info['prob'][i][j]) + + for i in range(len(info['action_id'])): + for j in 
range(len(info['action_id'][i])): + temp_action_id_list.append(info['action_id'][i][j]) + + for i in range(len(info['task_name'])): + for j in range(len(info['task_name'][i])): + temp_task_name_list.append(info['task_name'][i][j]) + + for i in range(len(info['target'])): + for j in range(len(info['target'][i])): + temp_target_list.append(info['target'][i][j]) + + prob = np.array(temp_prob_list) + action_id = np.array(temp_action_id_list) + task_name = np.array(temp_task_name_list) + target = np.array(temp_target_list) + + + write_data = np.concatenate((np.reshape(action_id, (-1, 1)), prob, np.reshape(task_name, (-1, 1)), np.reshape(target, (-1, 1))), axis=1) + import pandas as pd + head = [] + for j in range(79): + head.append('act'+str(j+1)) + head.append('task_name') + head.append('gt') + head.insert(0,'action_id') + path = pathlib.Path("stan/prediction/" + args.testset + "/" + args.model_type) + path.mkdir(parents=True, exist_ok=True) + pd.DataFrame(write_data).to_csv("stan/prediction/" + args.testset + "/" + args.model_type + "/model_" + model_name + '_strategy_' + task_name[0] + ".csv", header=head) + +def save(args, i, checkpoint_dir, model, task): + save_path = '{}/demo2predicate-{}{}.ckpt'.format(checkpoint_dir, 'best_model_', task) + model.save(save_path, True) + +def save_checkpoint(args, i, checkpoint_dir, model, task): + save_path = '{}/demo2predicate-{}{}.ckpt'.format(checkpoint_dir, 'checkpoint_model_', task) + model.save(save_path, True)