6 changes: 0 additions & 6 deletions constants.py

This file was deleted.

168 changes: 0 additions & 168 deletions format_beh.ipynb

This file was deleted.

100 changes: 100 additions & 0 deletions notebooks/create_design_matrix.ipynb
@@ -0,0 +1,100 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Notebook to create and store a design matrix of behavior and spikes "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The autoreload extension is already loaded. To reload it, use:\n",
" %reload_ext autoreload\n"
]
}
],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"from spike_tools import (\n",
" general as spike_general,\n",
" analysis as spike_analysis,\n",
")\n",
"import wcst_encode.data_utils as data_utils\n",
"from wcst_encode.constants import COLUMN_NAMES\n",
"\n",
"species = 'nhp'\n",
"subject = 'SA'\n",
"exp = 'WCST'\n",
"session = 20180802 # this is the session for which there are spikes at the moment. \n",
"\n",
"tau_pre = 20\n",
"tau_post = 0"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"spikes_by_bins = pd.read_pickle('/data/processed/sub-SA_sess-20180802_spike_counts_binsize_50.pickle')\n",
"beh_by_bins = pd.read_pickle('/data/processed/sub-SA_sess-20180802_behavior_binsize_50.pickle')\n",
"intervals = pd.read_pickle(\"/data/processed/sub-SA_sess-20180802_interval_1500_fb_1500_binsize_50.pickle\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"column_names_w_units = COLUMN_NAMES + spikes_by_bins.columns[1:].tolist()\n",
"design_mat = data_utils.get_design_matrix(spikes_by_bins, beh_by_bins, column_names_w_units, tau_pre, tau_post)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"design_mat.to_pickle(\"/data/processed/sub-SA_sess-20180802_design_mat_taupre_20_taupost_0_binsize_50.pickle\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
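The matrix construction itself is delegated to wcst_encode.data_utils.get_design_matrix, which is not part of this diff. As a rough illustration of the general technique (not the package's implementation), the sketch below concatenates the behavior bins with lagged copies of each unit's spike counts, assuming tau_pre and tau_post count 50 ms bins of history before and after each bin; the helper name, column naming, and lag convention are hypothetical.

import pandas as pd

def build_lagged_design_matrix(spikes_by_bins, beh_by_bins, tau_pre, tau_post):
    # Hypothetical sketch, not wcst_encode's implementation: one column per
    # (unit, lag), where a positive lag reaches `lag` bins back in time.
    unit_cols = [c for c in spikes_by_bins.columns if str(c).startswith("unit_")]
    lagged_blocks = []
    for lag in range(-tau_post, tau_pre + 1):
        shifted = spikes_by_bins[unit_cols].shift(lag)
        shifted.columns = [f"{c}_lag{lag}" for c in unit_cols]
        lagged_blocks.append(shifted.reset_index(drop=True))
    design = pd.concat([beh_by_bins.reset_index(drop=True)] + lagged_blocks, axis=1)
    return design.dropna()  # drop edge bins without a full spike history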
105 changes: 105 additions & 0 deletions notebooks/format_beh.ipynb
@@ -0,0 +1,105 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"from spike_tools import (\n",
" general as spike_general,\n",
" analysis as spike_analysis,\n",
")\n",
"import wcst_encode.data_utils\n",
"from wcst_encode.constants import FEATURES\n",
"\n",
"species = 'nhp'\n",
"subject = 'SA'\n",
"exp = 'WCST'\n",
"session = 20180802 # this is the session for which there are spikes at the moment. "
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"behavior_file = spike_general.get_behavior_path(subject, session)\n",
"behavior_data = pd.read_csv(\"/data/sub-SA_sess-20180802_object_features.csv\")\n",
"valid_beh = behavior_data[behavior_data.Response.isin([\"Correct\", \"Incorrect\"])]\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"behavior_by_bins = data_utils.get_behavior_by_bins(50, valid_beh)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"behavior_by_bins.to_pickle('/data/processed/sub-SA_sess-20180802_behavior_binsize_50.pickle')"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Grab bin idxs of interval around fb onset"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"intervals = data_utils.get_trial_intervals(valid_beh, pre_interval=1500, post_interval=1500, bin_size=50)"
]
},
{
"cell_type": "code",
"execution_count": 82,
"metadata": {},
"outputs": [],
"source": [
"intervals.to_pickle(\"/data/processed/sub-SA_sess-20180802_interval_1500_fb_1500_binsize_50.pickle\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
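Both helpers used above, data_utils.get_behavior_by_bins and data_utils.get_trial_intervals, live in wcst_encode.data_utils and are not shown in this diff. A minimal sketch of the interval computation is given below, assuming each trial row carries a feedback-onset time in milliseconds; the column names (TrialNumber, FeedbackOnset) and output schema are assumptions for illustration only.

import pandas as pd

def get_trial_intervals_sketch(valid_beh, pre_interval, post_interval, bin_size,
                               fb_col="FeedbackOnset", trial_col="TrialNumber"):
    # Hypothetical sketch: map a [fb - pre_interval, fb + post_interval] ms window
    # around feedback onset onto start/end indices of fixed-width time bins.
    fb = valid_beh[fb_col]
    return pd.DataFrame({
        trial_col: valid_beh[trial_col].values,
        "IntervalStartBin": ((fb - pre_interval) // bin_size).astype(int).values,
        "IntervalEndBin": ((fb + post_interval) // bin_size).astype(int).values,
    })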
File renamed without changes.
12 changes: 12 additions & 0 deletions wcst_encode/constants.py
@@ -0,0 +1,12 @@
# useful constants for analysis

FEATURES = [
'CIRCLE', 'SQUARE', 'STAR', 'TRIANGLE',
'CYAN', 'GREEN', 'MAGENTA', 'YELLOW',
'ESCHER', 'POLKADOT', 'RIPPLE', 'SWIRL'
]

NUM_UNITS = 59

COLUMN_NAMES_W_UNITS = FEATURES + ["CORRECT", "INCORRECT"] + [f"unit_{i}" for i in range(NUM_UNITS)]
COLUMN_NAMES = FEATURES + ["CORRECT", "INCORRECT"]
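For reference, with 12 features, two outcome flags, and NUM_UNITS = 59, the combined column list works out to 73 entries (an illustrative check, assuming the package is importable):

# >>> from wcst_encode.constants import COLUMN_NAMES, COLUMN_NAMES_W_UNITS
# >>> len(COLUMN_NAMES), len(COLUMN_NAMES_W_UNITS)
# (14, 73)
# >>> COLUMN_NAMES_W_UNITS[-1]
# 'unit_58'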