From 509aa1edff8ebfeb2aac3728fef61e8ca7ced550 Mon Sep 17 00:00:00 2001
From: Venus Sherathiya <32345130+venus-sherathiya@users.noreply.github.com>
Date: Mon, 1 Jul 2024 13:06:38 -0500
Subject: [PATCH 01/10] less interaction with GUI for running analysis
---
GuPPy/computePsth.py | 32 +--
GuPPy/findTransientsFreqAndAmp.py | 24 +-
GuPPy/preprocess.py | 34 +--
GuPPy/readTevTsq.py | 26 +-
GuPPy/runFiberPhotometryAnalysis.ipynb | 332 +++++++++++++++++++------
GuPPy/savingInputParameters.ipynb | 13 +-
6 files changed, 325 insertions(+), 136 deletions(-)
diff --git a/GuPPy/computePsth.py b/GuPPy/computePsth.py
index f0ee794..7098966 100755
--- a/GuPPy/computePsth.py
+++ b/GuPPy/computePsth.py
@@ -643,12 +643,14 @@ def averageForGroup(folderNames, event, inputParameters):
print("Group of data averaged.")
-def psthForEachStorename(inputParameters):
+def psthForEachStorename(inputParametersPath):
print("Computing PSTH, Peak and Area for each event...")
-
- inputParameters = inputParameters
+ with open(inputParametersPath) as f:
+ inputParameters = json.load(f)
+
+ #inputParameters = inputParameters
#storesList = np.genfromtxt(inputParameters['storesListPath'], dtype='str', delimiter=',')
@@ -758,17 +760,17 @@ def psthForEachStorename(inputParameters):
print("PSTH, Area and Peak are computed for all events.")
return inputParameters
-if __name__ == "__main__":
- try:
- inputParameters = psthForEachStorename(json.loads(sys.argv[1]))
- subprocess.call(["python",
- os.path.join(inputParameters["curr_dir"],"GuPPy","findTransientsFreqAndAmp.py"),
- json.dumps(inputParameters)])
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(str(e), logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# try:
+# inputParameters = psthForEachStorename(json.loads(sys.argv[1]))
+# subprocess.call(["python",
+# os.path.join(inputParameters["curr_dir"],"GuPPy","findTransientsFreqAndAmp.py"),
+# json.dumps(inputParameters)])
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(str(e), logging.ERROR)
+# raise e
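With this change, psthForEachStorename takes the path to the saved parameters JSON rather than an already-parsed dict, and the transients step is no longer chained via subprocess. A minimal sketch of driving the pipeline under the new interface, run from the GuPPy directory (paramsPath is an assumed name; the home-directory location is where savingInputParameters.ipynb now writes the file, per the last patch in this series):

    import os
    from readTevTsq import readRawData
    from preprocess import extractTsAndSignal
    from computePsth import psthForEachStorename
    from findTransientsFreqAndAmp import executeFindFreqAndAmp

    # Path the input-parameters GUI now writes to (home directory)
    paramsPath = os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json')

    readRawData(paramsPath)                 # step 1: read raw data
    extractTsAndSignal(paramsPath)          # step 2: signals and event timestamps
    ip = psthForEachStorename(paramsPath)   # step 3: PSTH, peak, area; returns the dict
    executeFindFreqAndAmp(ip)               # step 4: transients (takes the dict)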
diff --git a/GuPPy/findTransientsFreqAndAmp.py b/GuPPy/findTransientsFreqAndAmp.py
index d7c015d..7181bdc 100755
--- a/GuPPy/findTransientsFreqAndAmp.py
+++ b/GuPPy/findTransientsFreqAndAmp.py
@@ -178,7 +178,7 @@ def visuzlize_peaks(filepath, z_score, timestamps, peaksIndex):
timestamps[peaksIndex], z_score[peaksIndex], 'o')
ax.set_title(basename)
fig.suptitle(os.path.basename(dirname))
- #plt.show()
+ plt.show()
def findFreqAndAmp(filepath, inputParameters, window=15, numProcesses=mp.cpu_count()):
@@ -347,7 +347,7 @@ def executeFindFreqAndAmp(inputParameters):
findFreqAndAmp(filepath, inputParameters, window=moving_window, numProcesses=numProcesses)
writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
inputParameters['step'] += 1
- plt.show()
+ #plt.show()
else:
for i in range(len(folderNames)):
insertLog(f"Finding transients in z-score data of {folderNames[i]} and calculating frequency and amplitude.",
@@ -361,19 +361,19 @@ def executeFindFreqAndAmp(inputParameters):
writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
inputParameters['step'] += 1
insertLog('Transients in z-score data found and frequency and amplitude are calculated.', logging.INFO)
- plt.show()
+ #plt.show()
print('Transients in z-score data found and frequency and amplitude are calculated.')
-if __name__ == "__main__":
- try:
- executeFindFreqAndAmp(json.loads(sys.argv[1]))
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(str(e), logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# try:
+# executeFindFreqAndAmp(json.loads(sys.argv[1]))
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(str(e), logging.ERROR)
+# raise e
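Note that executeFindFreqAndAmp still expects the parsed dict; psthForEachStorename returns it after loading the JSON, which is how the notebook chains the two. To run the transients step on its own, one could load the file first; a sketch under the same path assumption as above:

    import json
    import os
    from findTransientsFreqAndAmp import executeFindFreqAndAmp

    paramsPath = os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json')
    with open(paramsPath) as f:
        inputParameters = json.load(f)      # executeFindFreqAndAmp wants the dict
    executeFindFreqAndAmp(inputParameters)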
diff --git a/GuPPy/preprocess.py b/GuPPy/preprocess.py
index c91691c..5f9eb98 100755
--- a/GuPPy/preprocess.py
+++ b/GuPPy/preprocess.py
@@ -15,7 +15,7 @@
import matplotlib.pyplot as plt
from matplotlib.widgets import MultiCursor
from combineDataFn import processTimestampsForCombiningData
-plt.switch_backend('TKAgg')
+#plt.switch_backend('TKAgg')
def insertLog(text, level):
file = os.path.join('.','..','guppy.log')
@@ -437,7 +437,7 @@ def visualize_z_score(filepath):
ax.plot(x,y)
ax.set_title(basename)
fig.suptitle(name)
- #plt.show()
+ plt.show()
# function to plot deltaF/F
def visualize_dff(filepath):
@@ -457,7 +457,7 @@ def visualize_dff(filepath):
ax.plot(x,y)
ax.set_title(basename)
fig.suptitle(name)
- #plt.show()
+ plt.show()
@@ -553,7 +553,7 @@ def plt_close_event(event):
cid = fig.canvas.mpl_connect('close_event', plt_close_event)
#multi = MultiCursor(fig.canvas, (ax1, ax2), color='g', lw=1, horizOn=False, vertOn=True)
- #plt.show()
+ plt.show()
#return fig
# function to plot control and signal, also provide a feature to select chunks for artifacts removal
@@ -1156,16 +1156,18 @@ def execute_zscore(folderNames, inputParameters):
writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
inputParameters['step'] += 1
- plt.show()
+ #plt.show()
insertLog("Signal data and event timestamps are extracted.", logging.INFO)
print("Signal data and event timestamps are extracted.")
-def extractTsAndSignal(inputParameters):
+def extractTsAndSignal(inputParametersPath):
print("Extracting signal data and event timestamps...")
insertLog("Extracting signal data and event timestamps", logging.DEBUG)
- inputParameters = inputParameters
+
+ with open(inputParametersPath) as f:
+ inputParameters = json.load(f)
#storesList = np.genfromtxt(inputParameters['storesListPath'], dtype='str', delimiter=',')
@@ -1202,15 +1204,15 @@ def extractTsAndSignal(inputParameters):
-if __name__ == "__main__":
- try:
- extractTsAndSignal(json.loads(sys.argv[1]))
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(str(e), logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# try:
+# extractTsAndSignal(json.loads(sys.argv[1]))
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(str(e), logging.ERROR)
+# raise e
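With the hard-coded plt.switch_backend('TKAgg') commented out, the backend is left to the caller; the notebook now selects it with %matplotlib tk. A script calling extractTsAndSignal directly might pick an interactive backend explicitly (a sketch, assuming Tk is installed):

    import matplotlib
    matplotlib.use('TkAgg')   # must run before pyplot creates any figures
    import matplotlib.pyplot as plt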
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index 1d2def0..c20e442 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -483,13 +483,15 @@ def execute_import_doric(filepath, storesList, flag, outputPath):
# function to read data from 'tsq' and 'tev' files
-def readRawData(inputParameters):
+def readRawData(inputParametersPath):
print('### Reading raw data... ###')
insertLog('### Reading raw data... ###', logging.DEBUG)
# get input parameters
- inputParameters = inputParameters
+ with open(inputParametersPath) as f:
+ inputParameters = json.load(f)
+
folderNames = inputParameters['folderNames']
numProcesses = inputParameters['numberOfCores']
storesListPath = []
@@ -544,14 +546,14 @@ def readRawData(inputParameters):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
-if __name__ == "__main__":
- print('run')
- try:
- readRawData(json.loads(sys.argv[1]))
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(f"An error occurred: {e}", logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# print('run')
+# try:
+# readRawData(json.loads(sys.argv[1]))
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(f"An error occurred: {e}", logging.ERROR)
+# raise e
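readRawData now parses the JSON itself and reads at least 'folderNames' and 'numberOfCores' from it. An illustrative subset of the file's contents, shown as the equivalent Python dict (field names taken from this hunk; the real file written by the GUI carries many more keys):

    # Illustrative subset only; the GUI writes the complete file.
    inputParameters = {
        "folderNames": ["/path/to/FP_Data/session1",
                        "/path/to/FP_Data/session2"],
        "numberOfCores": 2,
    }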
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index ef5828c..e753823 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -9,15 +9,16 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
- "%matplotlib\n",
+ "%matplotlib tk\n",
"import os\n",
+ "import json\n",
"from readTevTsq import readRawData\n",
"from preprocess import extractTsAndSignal\n",
"from computePsth import psthForEachStorename\n",
@@ -31,9 +32,9 @@
"### Step 2: Input Parameters GUI\n",
"\n",
"a) Open a new terminal/anaconda window and navigate to location of code by entering 'cd path_to_code'\n",
- "
Example: 'cd Desktop/GuPPy-main/GuPPy'
\n",
+ "
Example: 'cd Desktop/GuPPy-main/'
\n",
"b) Execute the following command to open GUI\n",
- "
panel serve --show savingInputParameters.ipynb
\n",
+ "panel serve --show GuPPy/savingInputParameters.ipynb
\n",
"c) Navigate to data location (using down arrow) and select one or more folders to analyze
\n",
"d) Select appropriate options and save to file by clicking on 'Save' button
\n",
"Note: removeArtifacts should be set to 'False' initially
\n",
@@ -43,11 +44,11 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
- "inputParameters = \"/Users/VENUS/Downloads/FP_Data/habitEarly/inputParameters/inputParameters.json\" "
+ "inputParametersPath = \"/Users/vns0170/GuPPyParamtersUsed.json\""
]
},
{
@@ -74,11 +75,60 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 3,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "### Reading raw data... ###\n",
+ "/Users/vns0170/Downloads/FP_Data/habitEarly/Photo_048_392-200728-121222\n",
+ "Trying to read tsq file.\n",
+ "Data from tsq file fetched.\n",
+ "Reading data for event Dv1A ...\n",
+ "Reading data for event Dv2A ...\n",
+ "Data for event Dv1A fetched and stored.\n",
+ "Data for event Dv2A fetched and stored.\n",
+ "Reading data for event Dv3B ...\n",
+ "Reading data for event Dv4B ...\n",
+ "Data for event Dv3B fetched and stored.\n",
+ "Reading data for event LNRW ...\n",
+ "Data for event Dv4B fetched and stored.\n",
+ "Reading data for event LNnR ...\n",
+ "Data for event LNRW fetched and stored.\n",
+ "Reading data for event PrtN ...\n",
+ "Data for event LNnR fetched and stored.\n",
+ "Reading data for event PrtR ...\n",
+ "Data for event PrtN fetched and stored.\n",
+ "Data for event PrtR fetched and stored.\n",
+ "Time taken = 5.42340\n",
+ "/Users/vns0170/Downloads/FP_Data/habitEarly/Photo_63_207-181030-103332\n",
+ "Trying to read tsq file.\n",
+ "Data from tsq file fetched.\n",
+ "Reading data for event Dv1A ...\n",
+ "Reading data for event Dv2A ...\n",
+ "Data for event Dv1A fetched and stored.\n",
+ "Data for event Dv2A fetched and stored.\n",
+ "Reading data for event Dv3B ...\n",
+ "Reading data for event Dv4B ...\n",
+ "Data for event Dv3B fetched and stored.\n",
+ "Reading data for event LNRW ...\n",
+ "Data for event Dv4B fetched and stored.\n",
+ "Reading data for event LNnR ...\n",
+ "Data for event LNRW fetched and stored.\n",
+ "Reading data for event PrtN ...\n",
+ "Data for event LNnR fetched and stored.\n",
+ "Reading data for event PrtR ...\n",
+ "Data for event PrtN fetched and stored.\n",
+ "Data for event PrtR fetched and stored.\n",
+ "Time taken = 6.39616\n",
+ "### Raw data fetched and saved.\n"
+ ]
+ }
+ ],
"source": [
- "readRawData(inputParameters)"
+ "readRawData(inputParametersPath)"
]
},
{
@@ -91,11 +141,75 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 4,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Extracting signal data and event timestamps...\n",
+ "Remove Artifacts : True\n",
+ "Combine Data : False\n",
+ "Isosbestic Control Channel : True\n",
+ "Correcting timestamps by getting rid of the first 1 seconds and convert timestamps to seconds...\n",
+ "Timestamps corrected and converted to seconds.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Correcting timestamps by getting rid of the first 1 seconds and convert timestamps to seconds...\n",
+ "Timestamps corrected and converted to seconds.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Applying correction of timestamps to the data and event timestamps...\n",
+ "Timestamps corrections applied to the data and event timestamps.\n",
+ "Removing Artifacts from the data and correcting timestamps...\n",
+ "Computing z-score for each of the data...\n",
+ "Remove Artifacts : True\n",
+ "Remove Artifacts : True\n",
+ "z-score for the data computed.\n",
+ "Processing timestamps to get rid of artifacts using concatenate method...\n",
+ "Timestamps processed, artifacts are removed and good chunks are concatenated.\n",
+ "Artifacts from the data are removed and timestamps are corrected.\n",
+ "Removing Artifacts from the data and correcting timestamps...\n",
+ "Computing z-score for each of the data...\n",
+ "Remove Artifacts : True\n",
+ "Remove Artifacts : True\n",
+ "z-score for the data computed.\n",
+ "Processing timestamps to get rid of artifacts using concatenate method...\n",
+ "Timestamps processed, artifacts are removed and good chunks are concatenated.\n",
+ "Artifacts from the data are removed and timestamps are corrected.\n",
+ "Signal data and event timestamps are extracted.\n"
+ ]
+ }
+ ],
"source": [
- "extractTsAndSignal(inputParameters)"
+ "extractTsAndSignal(inputParametersPath)"
]
},
{
@@ -120,12 +234,100 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 5,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Computing PSTH, Peak and Area for each event...\n",
+ "Average for group : False\n",
+ "Computing PSTH for event rwdNP...Computing PSTH for event urwdNP...\n",
+ "\n",
+ "PSTH for event rwdNP computed.\n",
+ "Computing PSTH for event rwdNP...\n",
+ "PSTH for event rwdNP computed.\n",
+ "Computing PSTH for event urwdPE...\n",
+ "PSTH for event urwdNP computed.\n",
+ "Computing PSTH for event urwdNP...\n",
+ "PSTH for event urwdPE computed.\n",
+ "Computing PSTH for event urwdPE...\n",
+ "PSTH for event urwdNP computed.\n",
+ "Computing PSTH for event rwdPE...\n",
+ "PSTH for event urwdPE computed.\n",
+ "PSTH for event rwdPE computed.\n",
+ "Computing PSTH for event rwdPE...\n",
+ "PSTH for event rwdPE computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdNP...Computing peak and area for PSTH mean signal for event urwdNP...\n",
+ "\n",
+ "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdNP...\n",
+ "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdPE...\n",
+ "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdNP...\n",
+ "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdPE...\n",
+ "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdPE...\n",
+ "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdPE...\n",
+ "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
+ "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
+ "Computing PSTH for event rwdNP...Computing PSTH for event urwdNP...\n",
+ "\n",
+ "PSTH for event rwdNP computed.\n",
+ "Computing PSTH for event rwdNP...\n",
+ "PSTH for event rwdNP computed.\n",
+ "Computing PSTH for event urwdPE...\n",
+ "PSTH for event urwdNP computed.\n",
+ "Computing PSTH for event urwdNP...\n",
+ "PSTH for event urwdPE computed.\n",
+ "Computing PSTH for event urwdPE...\n",
+ "PSTH for event urwdNP computed.\n",
+ "Computing PSTH for event rwdPE...\n",
+ "PSTH for event urwdPE computed.\n",
+ "PSTH for event rwdPE computed.\n",
+ "Computing PSTH for event rwdPE...\n",
+ "PSTH for event rwdPE computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdNP...Computing peak and area for PSTH mean signal for event rwdNP...\n",
+ "\n",
+ "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdNP...\n",
+ "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdPE...\n",
+ "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdNP...\n",
+ "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
+ "Computing peak and area for PSTH mean signal for event urwdPE...\n",
+ "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdPE...\n",
+ "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
+ "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
+ "Computing peak and area for PSTH mean signal for event rwdPE...\n",
+ "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
+ "PSTH, Area and Peak are computed for all events.\n",
+ "Finding transients in z-score data and calculating frequency and amplitude....\n",
+ "Calculating frequency and amplitude of transients in z-score data....\n",
+ "Creating chunks for multiprocessing...\n",
+ "Chunks are created for multiprocessing.\n",
+ "Creating chunks for multiprocessing...\n",
+ "Chunks are created for multiprocessing.\n",
+ "Frequency and amplitude of transients in z_score data are calculated.\n",
+ "Calculating frequency and amplitude of transients in z-score data....\n",
+ "Creating chunks for multiprocessing...\n",
+ "Chunks are created for multiprocessing.\n",
+ "Creating chunks for multiprocessing...\n",
+ "Chunks are created for multiprocessing.\n",
+ "Frequency and amplitude of transients in z_score data are calculated.\n",
+ "Transients in z-score data found and frequency and amplitude are calculated.\n"
+ ]
+ }
+ ],
"source": [
- "psthForEachStorename(inputParameters)\n",
- "executeFindFreqAndAmp(inputParameters)"
+ "ip = psthForEachStorename(inputParametersPath)\n",
+ "executeFindFreqAndAmp(ip)"
]
},
{
@@ -152,21 +354,43 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
- "inputParameters_group = \"/Users/VENUS/Downloads/FP_Data/T1FAM/inputParameters/inputParameters.json\" "
+ "inputParametersPath_group = \"/Users/vns0170/GuPPyParamtersUsed.json\" "
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 7,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Computing PSTH, Peak and Area for each event...\n",
+ "Average for group : True\n",
+ "Averaging group of data...\n",
+ "Group of data averaged.\n",
+ "Averaging group of data...\n",
+ "Group of data averaged.\n",
+ "Averaging group of data...\n",
+ "Group of data averaged.\n",
+ "Averaging group of data...\n",
+ "Group of data averaged.\n",
+ "PSTH, Area and Peak are computed for all events.\n",
+ "Finding transients in z-score data and calculating frequency and amplitude....\n",
+ "Combining results for frequency and amplitude of transients in z-score data...\n",
+ "Results for frequency and amplitude of transients in z-score data are combined.\n",
+ "Transients in z-score data found and frequency and amplitude are calculated.\n"
+ ]
+ }
+ ],
"source": [
- "psthForEachStorename(inputParameters_group)\n",
- "executeFindFreqAndAmp(inputParameters_group)"
+ "ip = psthForEachStorename(inputParametersPath_group)\n",
+ "executeFindFreqAndAmp(ip)"
]
},
{
@@ -191,70 +415,28 @@
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
- "conda init zsh"
- ]
+ "source": []
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "/Users/VENUS/all_codes/GuPPy\n"
- ]
- }
- ],
- "source": [
- "cd ~/all_codes/GuPPy"
- ]
+ "outputs": [],
+ "source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
- "!sh GuPPy_create_environment.sh"
- ]
+ "source": []
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "\r\n",
- "CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.\r\n",
- "To initialize your shell, run\r\n",
- "\r\n",
- " $ conda init \r\n",
- "\r\n",
- "Currently supported shells are:\r\n",
- " - bash\r\n",
- " - fish\r\n",
- " - tcsh\r\n",
- " - xonsh\r\n",
- " - zsh\r\n",
- " - powershell\r\n",
- "\r\n",
- "See 'conda init --help' for more information and options.\r\n",
- "\r\n",
- "IMPORTANT: You may need to close and restart your shell after running 'conda init'.\r\n",
- "\r\n",
- "\r\n"
- ]
- }
- ],
- "source": [
- "!conda activate guppy_test"
- ]
+ "outputs": [],
+ "source": []
},
{
"cell_type": "code",
@@ -506,9 +688,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "guppy",
+ "display_name": "guppy-test",
"language": "python",
- "name": "guppy"
+ "name": "guppy-test"
},
"language_info": {
"codemirror_mode": {
@@ -520,7 +702,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.6.10"
}
},
"nbformat": 4,
diff --git a/GuPPy/savingInputParameters.ipynb b/GuPPy/savingInputParameters.ipynb
index 2b7be06..01f1931 100755
--- a/GuPPy/savingInputParameters.ipynb
+++ b/GuPPy/savingInputParameters.ipynb
@@ -427,11 +427,12 @@
" \"highAmpFilt\": highAmpFilt.value,\n",
" \"transientsThresh\": transientsThresh.value \n",
" }\n",
- " for folder in files_1.value:\n",
- " with open(os.path.join(folder, 'GuPPyParamtersUsed.json'), 'w') as f:\n",
- " json.dump(analysisParameters, f, indent=4)\n",
- " insertLog(f\"Input Parameters file saved at {folder}\",\n",
- " logging.INFO)\n",
+ " parameters = getInputParameters()\n",
+ "\n",
+ " with open(os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json'), 'w') as f:\n",
+ " json.dump(parameters, f, indent=4)\n",
+ " insertLog(f\"Input Parameters file saved at {os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json')}\",\n",
+ " logging.INFO)\n",
" \n",
" insertLog('#'*400, logging.INFO)\n",
" \n",
@@ -562,7 +563,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.6 (default, Aug 11 2023, 19:44:49) \n[Clang 15.0.0 (clang-1500.0.40.1)]"
+ "version": "3.6.10"
},
"vscode": {
"interpreter": {
From 8c98c0fad6e06d8be41715ac0ab2b661b467eb0c Mon Sep 17 00:00:00 2001
From: Venus Sherathiya <32345130+venus-sherathiya@users.noreply.github.com>
Date: Mon, 1 Jul 2024 13:20:10 -0500
Subject: [PATCH 02/10] changes in the save input parameters NOTE
---
GuPPy/savingInputParameters.ipynb | 431 +++++++++++++++++++++++++++++-
1 file changed, 426 insertions(+), 5 deletions(-)
diff --git a/GuPPy/savingInputParameters.ipynb b/GuPPy/savingInputParameters.ipynb
index 01f1931..0f65360 100755
--- a/GuPPy/savingInputParameters.ipynb
+++ b/GuPPy/savingInputParameters.ipynb
@@ -2,9 +2,430 @@
"cells": [
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 2,
"metadata": {},
- "outputs": [],
+ "outputs": [
+   ... [~420 lines of accidentally committed notebook output elided: the BokehJS/Panel loader JavaScript injected by the Panel extension, plus a NameError traceback, "name 'folder_path' is not defined", raised by the pn.widgets.FileSelector cell] ...
+ ],
"source": [
"import os\n",
"import sys\n",
@@ -469,9 +890,9 @@
" \n",
"mark_down_ip = pn.pane.Markdown(\"\"\"**Step 1 : Save Input Parameters**\"\"\", width=500)\n",
"mark_down_ip_note = pn.pane.Markdown(\"\"\"***Note : ***
\n",
- " - Save Input Parameters will save input parameters used for the analysis\n",
- " in all the folders you selected for the analysis (useful for future\n",
- " reference). All analysis steps will run without saving input parameters.\n",
+ " - Save Input Parameters will save input parameters used for the analysis.\n",
+ " Please use the path where input parameters file is saved in the Jupyter Notebook.\n",
+ " It gets save\n",
" \"\"\", width=500, sizing_mode=\"stretch_width\")\n",
"save_button = pn.widgets.Button(name='Save to file...', button_type='primary', width=500, sizing_mode=\"stretch_width\", align='end')\n",
"mark_down_storenames = pn.pane.Markdown(\"\"\"**Step 2 : Open Storenames GUI
and save storenames**\"\"\", width=500)\n",
From be58d05e73bdd257340d28e3a91f3867d740e2e9 Mon Sep 17 00:00:00 2001
From: Venus Sherathiya <32345130+venus-sherathiya@users.noreply.github.com>
Date: Mon, 1 Jul 2024 13:24:44 -0500
Subject: [PATCH 03/10] changes in the input parameters NOTE section
---
GuPPy/savingInputParameters.ipynb | 431 +-----------------------------
1 file changed, 5 insertions(+), 426 deletions(-)
diff --git a/GuPPy/savingInputParameters.ipynb b/GuPPy/savingInputParameters.ipynb
index 0f65360..0694764 100755
--- a/GuPPy/savingInputParameters.ipynb
+++ b/GuPPy/savingInputParameters.ipynb
@@ -2,430 +2,9 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- ... (deleted: the Bokeh/HoloViews loader JavaScript, the PyViz comm-manager boilerplate, and the 'folder_path' NameError traceback, all identical to the notebook outputs shown earlier in this series) ...
- ],
+ "outputs": [],
"source": [
"import os\n",
"import sys\n",
@@ -890,9 +469,9 @@
" \n",
"mark_down_ip = pn.pane.Markdown(\"\"\"**Step 1 : Save Input Parameters**\"\"\", width=500)\n",
"mark_down_ip_note = pn.pane.Markdown(\"\"\"***Note : ***
\n",
- " - Save Input Parameters will save input parameters used for the analysis.\n",
- " Please use the path where input parameters file is saved in the Jupyter Notebook.\n",
- " It gets save\n",
+ " - It is mandatory to save the input parameters.\n",
+ " It gets saved in the HOME directory.\n",
+ " Copy the path to the file in the Jupyter Notebook.\n",
" \"\"\", width=500, sizing_mode=\"stretch_width\")\n",
"save_button = pn.widgets.Button(name='Save to file...', button_type='primary', width=500, sizing_mode=\"stretch_width\", align='end')\n",
"mark_down_storenames = pn.pane.Markdown(\"\"\"**Step 2 : Open Storenames GUI
and save storenames**\"\"\", width=500)\n",
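
The updated NOTE describes a small handshake between the GUI and the notebook: the parameters file is written to the HOME directory, and its path is then pasted into the notebook. A minimal sketch of the notebook side, assuming the file name GuPPyParamtersUsed.json that appears in the notebook cells later in this series:

    import os
    import json

    # Path of the parameters file written by the "Save to file..." button.
    # The file name below is the one used in the notebook cells later in
    # this series; other installs may name it differently.
    inputParametersPath = os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json')

    with open(inputParametersPath) as f:
        inputParameters = json.load(f)

    # e.g. the folders selected for the analysis
    print(inputParameters['folderNames'])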
From 9eb6d6f1e1397c6c9e9807710ce13fb775eff39a Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Fri, 12 Jul 2024 08:23:26 -0700
Subject: [PATCH 04/10] added tqdm to slow read step
---
GuPPy/readTevTsq.py | 3 +-
GuPPy/runFiberPhotometryAnalysis.ipynb | 209 ++++++-------------------
2 files changed, 51 insertions(+), 161 deletions(-)
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index c20e442..3c84192 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -12,6 +12,7 @@
import pandas as pd
from numpy import int32, uint32, uint8, uint16, float64, int64, int32, float32
import multiprocessing as mp
+from tqdm import tqdm
def insertLog(text, level):
file = os.path.join('.','..','guppy.log')
@@ -322,7 +323,7 @@ def readtev(data, filepath, event, outputPath):
if formatNew != 5:
nsample = (data_size[first_row,]-10)*int(table[formatNew, 2])
S['data'] = np.zeros((len(fp_loc), nsample))
- for i in range(0, len(fp_loc)):
+ for i in tqdm(range(0, len(fp_loc))):
with open(tevfilepath, 'rb') as fp:
fp.seek(fp_loc[i], os.SEEK_SET)
S['data'][i,:] = np.fromfile(fp, dtype=table[formatNew, 3], count=nsample).reshape(1, nsample, order='F')
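
The hunk above only wraps the existing per-record loop in tqdm, so long .tev reads show a progress bar instead of appearing hung (the notebook output below records runs of several minutes per event). A self-contained sketch of the pattern, with names mirroring the diff rather than the full readtev() implementation:

    import numpy as np
    from tqdm import tqdm

    def read_records(tevfilepath, fp_loc, nsample, dtype=np.float32):
        # fp_loc holds the byte offset of each record in the .tev file
        data = np.zeros((len(fp_loc), nsample))
        with open(tevfilepath, 'rb') as fp:        # open once, not per record
            for i in tqdm(range(len(fp_loc))):     # progress bar over records
                fp.seek(fp_loc[i])
                data[i, :] = np.fromfile(fp, dtype=dtype, count=nsample)
        return data

One simplification to note: the sketch opens the file once per call, whereas the original re-opens it inside the loop on every iteration; the tqdm wrapper itself adds negligible overhead either way.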
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index e753823..26db78e 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -9,9 +9,19 @@
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": 3,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "The autoreload extension is already loaded. To reload it, use:\n",
+ " %reload_ext autoreload\n",
+ "Warning: Cannot change to a different GUI toolkit: tk. Using osx instead.\n"
+ ]
+ }
+ ],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
@@ -44,11 +54,11 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
- "inputParametersPath = \"/Users/vns0170/GuPPyParamtersUsed.json\""
+ "inputParametersPath = \"/Users/pauladkisson/GuPPyParamtersUsed.json\""
]
},
{
@@ -75,7 +85,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 9,
"metadata": {},
"outputs": [
{
@@ -83,46 +93,40 @@
"output_type": "stream",
"text": [
"### Reading raw data... ###\n",
- "/Users/vns0170/Downloads/FP_Data/habitEarly/Photo_048_392-200728-121222\n",
- "Trying to read tsq file.\n",
- "Data from tsq file fetched.\n",
- "Reading data for event Dv1A ...\n",
- "Reading data for event Dv2A ...\n",
- "Data for event Dv1A fetched and stored.\n",
- "Data for event Dv2A fetched and stored.\n",
- "Reading data for event Dv3B ...\n",
- "Reading data for event Dv4B ...\n",
- "Data for event Dv3B fetched and stored.\n",
- "Reading data for event LNRW ...\n",
- "Data for event Dv4B fetched and stored.\n",
- "Reading data for event LNnR ...\n",
- "Data for event LNRW fetched and stored.\n",
- "Reading data for event PrtN ...\n",
- "Data for event LNnR fetched and stored.\n",
- "Reading data for event PrtR ...\n",
- "Data for event PrtN fetched and stored.\n",
- "Data for event PrtR fetched and stored.\n",
- "Time taken = 5.42340\n",
- "/Users/vns0170/Downloads/FP_Data/habitEarly/Photo_63_207-181030-103332\n",
+ "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/Photo_112_283-190620-093542\n",
"Trying to read tsq file.\n",
"Data from tsq file fetched.\n",
- "Reading data for event Dv1A ...\n",
- "Reading data for event Dv2A ...\n",
+ "Reading data for event Dv1A ...\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 29389/29389 [06:38<00:00, 73.70it/s] \n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
"Data for event Dv1A fetched and stored.\n",
+ "Reading data for event Dv2A ...\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 29389/29389 [09:43<00:00, 50.39it/s]\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
"Data for event Dv2A fetched and stored.\n",
- "Reading data for event Dv3B ...\n",
- "Reading data for event Dv4B ...\n",
- "Data for event Dv3B fetched and stored.\n",
- "Reading data for event LNRW ...\n",
- "Data for event Dv4B fetched and stored.\n",
- "Reading data for event LNnR ...\n",
- "Data for event LNRW fetched and stored.\n",
- "Reading data for event PrtN ...\n",
- "Data for event LNnR fetched and stored.\n",
- "Reading data for event PrtR ...\n",
- "Data for event PrtN fetched and stored.\n",
- "Data for event PrtR fetched and stored.\n",
- "Time taken = 6.39616\n",
+ "Time taken = 983.78427\n",
"### Raw data fetched and saved.\n"
]
}
@@ -141,7 +145,7 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 10,
"metadata": {},
"outputs": [
{
@@ -149,7 +153,7 @@
"output_type": "stream",
"text": [
"Extracting signal data and event timestamps...\n",
- "Remove Artifacts : True\n",
+ "Remove Artifacts : False\n",
"Combine Data : False\n",
"Isosbestic Control Channel : True\n",
"Correcting timestamps by getting rid of the first 1 seconds and convert timestamps to seconds...\n",
@@ -158,52 +162,9 @@
"Timestamps corrections applied to the data and event timestamps.\n",
"Applying correction of timestamps to the data and event timestamps...\n",
"Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Correcting timestamps by getting rid of the first 1 seconds and convert timestamps to seconds...\n",
- "Timestamps corrected and converted to seconds.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Applying correction of timestamps to the data and event timestamps...\n",
- "Timestamps corrections applied to the data and event timestamps.\n",
- "Removing Artifacts from the data and correcting timestamps...\n",
- "Computing z-score for each of the data...\n",
- "Remove Artifacts : True\n",
- "Remove Artifacts : True\n",
- "z-score for the data computed.\n",
- "Processing timestamps to get rid of artifacts using concatenate method...\n",
- "Timestamps processed, artifacts are removed and good chunks are concatenated.\n",
- "Artifacts from the data are removed and timestamps are corrected.\n",
- "Removing Artifacts from the data and correcting timestamps...\n",
"Computing z-score for each of the data...\n",
- "Remove Artifacts : True\n",
- "Remove Artifacts : True\n",
+ "Remove Artifacts : False\n",
"z-score for the data computed.\n",
- "Processing timestamps to get rid of artifacts using concatenate method...\n",
- "Timestamps processed, artifacts are removed and good chunks are concatenated.\n",
- "Artifacts from the data are removed and timestamps are corrected.\n",
"Signal data and event timestamps are extracted.\n"
]
}
@@ -234,7 +195,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
@@ -243,83 +204,11 @@
"text": [
"Computing PSTH, Peak and Area for each event...\n",
"Average for group : False\n",
- "Computing PSTH for event rwdNP...Computing PSTH for event urwdNP...\n",
- "\n",
- "PSTH for event rwdNP computed.\n",
- "Computing PSTH for event rwdNP...\n",
- "PSTH for event rwdNP computed.\n",
- "Computing PSTH for event urwdPE...\n",
- "PSTH for event urwdNP computed.\n",
- "Computing PSTH for event urwdNP...\n",
- "PSTH for event urwdPE computed.\n",
- "Computing PSTH for event urwdPE...\n",
- "PSTH for event urwdNP computed.\n",
- "Computing PSTH for event rwdPE...\n",
- "PSTH for event urwdPE computed.\n",
- "PSTH for event rwdPE computed.\n",
- "Computing PSTH for event rwdPE...\n",
- "PSTH for event rwdPE computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdNP...Computing peak and area for PSTH mean signal for event urwdNP...\n",
- "\n",
- "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdNP...\n",
- "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdPE...\n",
- "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdNP...\n",
- "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdPE...\n",
- "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdPE...\n",
- "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdPE...\n",
- "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
- "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
- "Computing PSTH for event rwdNP...Computing PSTH for event urwdNP...\n",
- "\n",
- "PSTH for event rwdNP computed.\n",
- "Computing PSTH for event rwdNP...\n",
- "PSTH for event rwdNP computed.\n",
- "Computing PSTH for event urwdPE...\n",
- "PSTH for event urwdNP computed.\n",
- "Computing PSTH for event urwdNP...\n",
- "PSTH for event urwdPE computed.\n",
- "Computing PSTH for event urwdPE...\n",
- "PSTH for event urwdNP computed.\n",
- "Computing PSTH for event rwdPE...\n",
- "PSTH for event urwdPE computed.\n",
- "PSTH for event rwdPE computed.\n",
- "Computing PSTH for event rwdPE...\n",
- "PSTH for event rwdPE computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdNP...Computing peak and area for PSTH mean signal for event rwdNP...\n",
- "\n",
- "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdNP...\n",
- "Peak and Area for PSTH mean signal for event rwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdPE...\n",
- "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdNP...\n",
- "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
- "Computing peak and area for PSTH mean signal for event urwdPE...\n",
- "Peak and Area for PSTH mean signal for event urwdNP computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdPE...\n",
- "Peak and Area for PSTH mean signal for event urwdPE computed.\n",
- "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
- "Computing peak and area for PSTH mean signal for event rwdPE...\n",
- "Peak and Area for PSTH mean signal for event rwdPE computed.\n",
"PSTH, Area and Peak are computed for all events.\n",
"Finding transients in z-score data and calculating frequency and amplitude....\n",
"Calculating frequency and amplitude of transients in z-score data....\n",
"Creating chunks for multiprocessing...\n",
"Chunks are created for multiprocessing.\n",
- "Creating chunks for multiprocessing...\n",
- "Chunks are created for multiprocessing.\n",
- "Frequency and amplitude of transients in z_score data are calculated.\n",
- "Calculating frequency and amplitude of transients in z-score data....\n",
- "Creating chunks for multiprocessing...\n",
- "Chunks are created for multiprocessing.\n",
- "Creating chunks for multiprocessing...\n",
- "Chunks are created for multiprocessing.\n",
"Frequency and amplitude of transients in z_score data are calculated.\n",
"Transients in z-score data found and frequency and amplitude are calculated.\n"
]
From e02269c5962f4966f81193822b720bbb764364e9 Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Fri, 12 Jul 2024 13:06:56 -0700
Subject: [PATCH 05/10] added guppy_read_env for pynwb
---
GuPPy/readTevTsq.py | 27 +++++++++
GuPPy/runFiberPhotometryAnalysis.ipynb | 82 ++++++++++++++++----------
guppy_read_env.yaml | 14 +++++
3 files changed, 92 insertions(+), 31 deletions(-)
create mode 100644 guppy_read_env.yaml
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index 3c84192..ddf2915 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -13,6 +13,7 @@
from numpy import int32, uint32, uint8, uint16, float64, int64, int32, float32
import multiprocessing as mp
from tqdm import tqdm
+from pprint import pprint
def insertLog(text, level):
file = os.path.join('.','..','guppy.log')
@@ -336,6 +337,10 @@ def readtev(data, filepath, event, outputPath):
S['data'] = (S['data'].T).reshape(-1, order='F')
+
+ S_print = S.copy()
+ S_print.pop('data')
+ pprint(S_print)
save_dict_to_hdf5(S, event, outputPath)
@@ -547,6 +552,28 @@ def readRawData(inputParametersPath):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
+# from pynwb import NWBHDF5IO
+# def read_nwb(filepath, event, outputPath, indices):
+# """
+# Read photometry data from an NWB file and save the output to a hdf5 file.
+# """
+# print(f"Reading NWB file {filepath} for event {event} to save to {outputPath} with indices {indices}")
+
+# with NWBHDF5IO(filepath, 'r') as io:
+# nwbfile = io.read()
+# fiber_photometry_response_series = nwbfile.acquisition[event].data[:, indices]
+# sampling_rate = fiber_photometry_response_series.rate
+
+# S = dict()
+# S['storename'] = str(event)
+# S['sampling_rate'] = sampling_rate
+# S['timestamps'] = np.arange(0, fiber_photometry_response_series.shape[0]) / sampling_rate
+# S['data'] = fiber_photometry_response_series
+ # save_dict_to_hdf5(S, event, outputPath)
+ # check_data(S, filepath, event, outputPath)
+ # print("Data for event {} fetched and stored.".format(event))
+ # insertLog("Data for event {} fetched and stored.".format(event), logging.INFO)
+
# if __name__ == "__main__":
# print('run')
# try:
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index 26db78e..758fcd9 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 15,
"metadata": {},
"outputs": [
{
@@ -85,54 +85,74 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 19,
"metadata": {},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "### Reading raw data... ###\n",
- "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/Photo_112_283-190620-093542\n",
- "Trying to read tsq file.\n",
- "Data from tsq file fetched.\n",
- "Reading data for event Dv1A ...\n"
- ]
- },
{
"name": "stderr",
"output_type": "stream",
"text": [
- "100%|██████████| 29389/29389 [06:38<00:00, 73.70it/s] \n"
+ "[autoreload of readTevTsq failed: Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/site-packages/IPython/extensions/autoreload.py\", line 245, in check\n",
+ " superreload(m, reload, self.old_objects)\n",
+ " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/site-packages/IPython/extensions/autoreload.py\", line 394, in superreload\n",
+ " module = reload(module)\n",
+ " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/imp.py\", line 315, in reload\n",
+ " return importlib.reload(module)\n",
+ " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/importlib/__init__.py\", line 166, in reload\n",
+ " _bootstrap._exec(spec, module)\n",
+ " File \"\", line 618, in _exec\n",
+ " File \"\", line 678, in exec_module\n",
+ " File \"\", line 219, in _call_with_frames_removed\n",
+ " File \"/Users/pauladkisson/Documents/CatalystNeuro/NWB/LernerConv/GuPPy/GuPPy/readTevTsq.py\", line 555, in \n",
+ " from pynwb import NWBHDF5IO\n",
+ "ModuleNotFoundError: No module named 'pynwb'\n",
+ "]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
- "Data for event Dv1A fetched and stored.\n",
- "Reading data for event Dv2A ...\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "100%|██████████| 29389/29389 [09:43<00:00, 50.39it/s]\n"
+ "### Reading raw data... ###\n",
+ "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb\n",
+ "Trying to read tsq file.\n",
+ "\u001b[1mtsq file not found.\u001b[1m\n",
+ "Checking if doric file exists.\n",
+ "\u001b[1mDoric file not found.\u001b[1m\n",
+ "### Raw data fetched and saved.\n"
]
- },
+ }
+ ],
+ "source": [
+ "from readTevTsq import readRawData\n",
+ "readRawData(inputParametersPath)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "Data for event Dv2A fetched and stored.\n",
- "Time taken = 983.78427\n",
- "### Raw data fetched and saved.\n"
+ "Reading NWB file /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb for event fiber_photometry_response_series to save to /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output\n"
]
}
],
"source": [
- "readRawData(inputParametersPath)"
+ "#readRawData(inputParametersPath)\n",
+ "\n",
+ "from readTevTsq import read_nwb\n",
+ "import json\n",
+ "with open(inputParametersPath) as f:\n",
+ " inputParameters = json.load(f)\n",
+ "filepath = inputParameters['folderNames'][0]\n",
+ "event = \"fiber_photometry_response_series\"\n",
+ "outputPath = \"/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output\"\n",
+ "read_nwb(filepath=filepath, event=event, outputPath=outputPath)\n"
]
},
{
@@ -577,9 +597,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "guppy-test",
+ "display_name": "guppy_env",
"language": "python",
- "name": "guppy-test"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -591,7 +611,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.6.10"
+ "version": "3.12.4"
}
},
"nbformat": 4,
diff --git a/guppy_read_env.yaml b/guppy_read_env.yaml
new file mode 100644
index 0000000..11e09f2
--- /dev/null
+++ b/guppy_read_env.yaml
@@ -0,0 +1,14 @@
+name: guppy_read_env
+channels:
+ - defaults
+ - conda-forge
+dependencies:
+ - python>=3.8
+ - h5py
+ - numpy
+ - pandas
+ - tqdm
+ - ipykernel
+ - pip
+ - pip:
+ - pynwb
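
The guppy_read_env environment above only needs to cover the reading steps of the notebook. As a minimal sanity check, assuming the guppy_read_env kernel is selected in Jupyter, the pinned packages can be imported directly; pynwb is the one dependency the main python 3.6 GuPPy environment lacks:

    # Run inside the guppy_read_env kernel to confirm the dependencies
    # above resolve; pynwb is pip-installed and is missing from the main
    # python 3.6 GuPPy environment.
    import h5py
    import numpy
    import pandas
    import tqdm
    import pynwb

    print(h5py.__version__, numpy.__version__, pynwb.__version__)
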
From 6aec43652944ebd87d8065d252d64f4128672c6c Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Mon, 15 Jul 2024 08:44:33 -0700
Subject: [PATCH 06/10] treat nwb files like tdt files in preprocessing
---
GuPPy/preprocess.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/GuPPy/preprocess.py b/GuPPy/preprocess.py
index 5f9eb98..5df4211 100755
--- a/GuPPy/preprocess.py
+++ b/GuPPy/preprocess.py
@@ -155,8 +155,9 @@ def add_control_channel(filepath, arr):
return arr
# check if dealing with TDT files or csv files
+# NWB files are treated like TDT files
def check_TDT(filepath):
- path = glob.glob(os.path.join(filepath, '*.tsq'))
+ path = glob.glob(os.path.join(filepath, '*.tsq')) + glob.glob(os.path.join(filepath, '*.nwb'))
if len(path)>0:
return True
else:
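
Condensed, the detection logic after this patch reduces to the sketch below: a folder is routed down the TDT code path whenever it contains either *.tsq or *.nwb files.

    import glob
    import os

    def check_TDT(filepath):
        # True when the folder holds TDT tank files (*.tsq) or NWB files
        # (*.nwb); both formats then share the TDT preprocessing path.
        path = glob.glob(os.path.join(filepath, '*.tsq')) + \
               glob.glob(os.path.join(filepath, '*.nwb'))
        return len(path) > 0
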
From 21e3577a11811a371188a20b2f9a3f16de3efa39 Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Mon, 15 Jul 2024 09:13:40 -0700
Subject: [PATCH 07/10] added read_nwb()
---
GuPPy/readTevTsq.py | 55 ++++++++--------
GuPPy/runFiberPhotometryAnalysis.ipynb | 88 +++++++++++++++-----------
2 files changed, 80 insertions(+), 63 deletions(-)
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index ddf2915..c3bf0c4 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -337,10 +337,6 @@ def readtev(data, filepath, event, outputPath):
S['data'] = (S['data'].T).reshape(-1, order='F')
-
- S_print = S.copy()
- S_print.pop('data')
- pprint(S_print)
save_dict_to_hdf5(S, event, outputPath)
@@ -552,27 +548,36 @@ def readRawData(inputParametersPath):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
-# from pynwb import NWBHDF5IO
-# def read_nwb(filepath, event, outputPath, indices):
-# """
-# Read photometry data from an NWB file and save the output to a hdf5 file.
-# """
-# print(f"Reading NWB file {filepath} for event {event} to save to {outputPath} with indices {indices}")
-
-# with NWBHDF5IO(filepath, 'r') as io:
-# nwbfile = io.read()
-# fiber_photometry_response_series = nwbfile.acquisition[event].data[:, indices]
-# sampling_rate = fiber_photometry_response_series.rate
-
-# S = dict()
-# S['storename'] = str(event)
-# S['sampling_rate'] = sampling_rate
-# S['timestamps'] = np.arange(0, fiber_photometry_response_series.shape[0]) / sampling_rate
-# S['data'] = fiber_photometry_response_series
- # save_dict_to_hdf5(S, event, outputPath)
- # check_data(S, filepath, event, outputPath)
- # print("Data for event {} fetched and stored.".format(event))
- # insertLog("Data for event {} fetched and stored.".format(event), logging.INFO)
+def read_nwb(filepath, outputPath, indices):
+ """
+    Read photometry data from an NWB file and save the output to an hdf5 file.
+ """
+ from pynwb import NWBHDF5IO # Dynamic import is necessary since pynwb isn't available in the main environment (python 3.6)
+ print(f"Reading all events {indices} from NWB file {filepath} to save to {outputPath}")
+
+ with NWBHDF5IO(filepath, 'r') as io:
+ nwbfile = io.read()
+ fiber_photometry_response_series = nwbfile.acquisition['fiber_photometry_response_series']
+ data = fiber_photometry_response_series.data[:]
+ sampling_rate = fiber_photometry_response_series.rate
+ timestamps = np.arange(0, data.shape[0]) / sampling_rate
+ npoints = 128
+
+ for index in indices:
+ event = f'event_{index}'
+ S = {}
+ S['storename'] = str(event)
+ S['sampling_rate'] = sampling_rate
+ S['timestamps'] = timestamps[::npoints]
+ S['data'] = data[:, index]
+ S['npoints'] = 128
+ S['channels'] = np.ones_like(S['timestamps'])
+
+ save_dict_to_hdf5(S, event, outputPath)
+ check_data(S, filepath, event, outputPath)
+ print("Data for event {} fetched and stored.".format(event))
+ insertLog("Data for event {} fetched and stored.".format(event), logging.INFO)
+
# if __name__ == "__main__":
# print('run')
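
The npoints=128 value appears to mirror GuPPy's TDT convention of storing one timestamp per 128-sample block rather than one per sample, which is why read_nwb() keeps timestamps[::npoints]. A small illustration with made-up numbers:

    import numpy as np

    sampling_rate = 1000.0                    # Hz, illustrative
    data = np.zeros(1024)                     # 8 blocks of 128 samples
    npoints = 128
    timestamps = np.arange(data.shape[0]) / sampling_rate
    block_timestamps = timestamps[::npoints]  # first timestamp of each block
    assert block_timestamps.shape[0] == data.shape[0] // npoints  # 8 blocks
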
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index 758fcd9..3de9d5f 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 12,
"metadata": {},
"outputs": [
{
@@ -17,8 +17,7 @@
"output_type": "stream",
"text": [
"The autoreload extension is already loaded. To reload it, use:\n",
- " %reload_ext autoreload\n",
- "Warning: Cannot change to a different GUI toolkit: tk. Using osx instead.\n"
+ " %reload_ext autoreload\n"
]
}
],
@@ -29,7 +28,6 @@
"%matplotlib tk\n",
"import os\n",
"import json\n",
- "from readTevTsq import readRawData\n",
"from preprocess import extractTsAndSignal\n",
"from computePsth import psthForEachStorename\n",
"from findTransientsFreqAndAmp import executeFindFreqAndAmp"
@@ -54,7 +52,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
@@ -85,41 +83,50 @@
},
{
"cell_type": "code",
- "execution_count": 19,
+ "execution_count": 3,
"metadata": {},
"outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "### Reading raw data... ###\n",
+ "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/Photo_112_283-190620-093542\n",
+ "Trying to read tsq file.\n",
+ "Data from tsq file fetched.\n",
+ "Reading data for event Dv1A ...\n",
+ "In readtev(), type(S['sampling_rate']) = \n"
+ ]
+ },
{
"name": "stderr",
"output_type": "stream",
"text": [
- "[autoreload of readTevTsq failed: Traceback (most recent call last):\n",
- " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/site-packages/IPython/extensions/autoreload.py\", line 245, in check\n",
- " superreload(m, reload, self.old_objects)\n",
- " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/site-packages/IPython/extensions/autoreload.py\", line 394, in superreload\n",
- " module = reload(module)\n",
- " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/imp.py\", line 315, in reload\n",
- " return importlib.reload(module)\n",
- " File \"/opt/anaconda3/envs/guppy_env/lib/python3.6/importlib/__init__.py\", line 166, in reload\n",
- " _bootstrap._exec(spec, module)\n",
-      "  File \"<frozen importlib._bootstrap>\", line 618, in _exec\n",
-      "  File \"<frozen importlib._bootstrap_external>\", line 678, in exec_module\n",
-      "  File \"<frozen importlib._bootstrap>\", line 219, in _call_with_frames_removed\n",
-      "  File \"/Users/pauladkisson/Documents/CatalystNeuro/NWB/LernerConv/GuPPy/GuPPy/readTevTsq.py\", line 555, in <module>\n",
- " from pynwb import NWBHDF5IO\n",
- "ModuleNotFoundError: No module named 'pynwb'\n",
- "]\n"
+ "100%|██████████| 29389/29389 [05:47<00:00, 84.64it/s] \n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
- "### Reading raw data... ###\n",
- "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb\n",
- "Trying to read tsq file.\n",
- "\u001b[1mtsq file not found.\u001b[1m\n",
- "Checking if doric file exists.\n",
- "\u001b[1mDoric file not found.\u001b[1m\n",
+ "Data for event Dv1A fetched and stored.\n",
+ "Reading data for event Dv2A ...\n",
+ "In readtev(), type(S['sampling_rate']) = \n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 29389/29389 [09:31<00:00, 51.38it/s]\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Data for event Dv2A fetched and stored.\n",
+ "Time taken = 920.97965\n",
"### Raw data fetched and saved.\n"
]
}
@@ -131,28 +138,33 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "Reading NWB file /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb for event fiber_photometry_response_series to save to /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output\n"
+ "Reading all events [0, 1] from NWB file /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb to save to /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output_0\n",
+ "Data for event event_0 fetched and stored.\n",
+ "Data for event event_1 fetched and stored.\n"
]
}
],
"source": [
- "#readRawData(inputParametersPath)\n",
- "\n",
"from readTevTsq import read_nwb\n",
"import json\n",
+ "from pathlib import Path\n",
+ "inputParametersPath = \"/Users/pauladkisson/GuPPyParamtersUsed.json\"\n",
+ "\n",
"with open(inputParametersPath) as f:\n",
" inputParameters = json.load(f)\n",
- "filepath = inputParameters['folderNames'][0]\n",
- "event = \"fiber_photometry_response_series\"\n",
- "outputPath = \"/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output\"\n",
- "read_nwb(filepath=filepath, event=event, outputPath=outputPath)\n"
+ "folder_path = Path(inputParameters['folderNames'][0])\n",
+ "nwbfile_name = folder_path.name + \".nwb\"\n",
+ "filepath = folder_path / nwbfile_name\n",
+ "outputPath = \"/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output_0\"\n",
+ "indices = [0, 1]\n",
+ "read_nwb(filepath, outputPath, indices)"
]
},
{
@@ -165,7 +177,7 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 14,
"metadata": {},
"outputs": [
{
@@ -215,7 +227,7 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 15,
"metadata": {},
"outputs": [
{
From f38f6988bc2f4b608c01f7f8156690fde06d34b8 Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Mon, 15 Jul 2024 10:13:40 -0700
Subject: [PATCH 08/10] generalized read_nwb() to handle different types of
response series
---
GuPPy/readTevTsq.py | 17 +++++++++++------
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index c3bf0c4..3ab19cd 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -548,7 +548,7 @@ def readRawData(inputParametersPath):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
-def read_nwb(filepath, outputPath, indices):
+def read_nwb(filepath, outputPath, response_series_name, indices, npoints=128):
"""
    Read photometry data from an NWB file and save the output to an hdf5 file.
"""
@@ -557,11 +557,16 @@ def read_nwb(filepath, outputPath, indices):
with NWBHDF5IO(filepath, 'r') as io:
nwbfile = io.read()
- fiber_photometry_response_series = nwbfile.acquisition['fiber_photometry_response_series']
+ fiber_photometry_response_series = nwbfile.acquisition[response_series_name]
data = fiber_photometry_response_series.data[:]
- sampling_rate = fiber_photometry_response_series.rate
- timestamps = np.arange(0, data.shape[0]) / sampling_rate
- npoints = 128
+ sampling_rate = getattr(fiber_photometry_response_series, 'rate', default=None)
+ timestamps = getattr(fiber_photometry_response_series, 'timestamps', default=None)
+ if sampling_rate is None and timestamps is not None:
+ sampling_rate = 1 / np.median(np.diff(timestamps))
+ elif timestamps is None and sampling_rate is not None:
+ timestamps = np.arange(0, data.shape[0]) / sampling_rate
+ else:
+ raise Exception(f"Fiber photometry response series {response_series_name} must have rate or timestamps.")
for index in indices:
event = f'event_{index}'
@@ -570,7 +575,7 @@ def read_nwb(filepath, outputPath, indices):
S['sampling_rate'] = sampling_rate
S['timestamps'] = timestamps[::npoints]
S['data'] = data[:, index]
- S['npoints'] = 128
+ S['npoints'] = npoints
S['channels'] = np.ones_like(S['timestamps'])
save_dict_to_hdf5(S, event, outputPath)
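
The fallback added here reflects that an NWB TimeSeries defines either a fixed rate or explicit timestamps, never both. A standalone sketch of the same logic (resolve_timing is a hypothetical helper name; note that getattr takes its default positionally, which the next patch corrects):

    import numpy as np

    def resolve_timing(rate, timestamps, n_samples):
        # Derive whichever of (rate, timestamps) the response series lacks.
        if rate is None and timestamps is not None:
            rate = 1 / np.median(np.diff(timestamps))
        elif timestamps is None and rate is not None:
            timestamps = np.arange(n_samples) / rate
        else:
            raise Exception("response series must have rate or timestamps")
        return rate, timestamps

    rate, ts = resolve_timing(None, np.array([0.0, 0.1, 0.2, 0.3]), 4)
    assert abs(rate - 10.0) < 1e-6
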
From 0837a353c1dd540dd0a42bb15ff051cafdc840f5 Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Mon, 15 Jul 2024 10:58:43 -0700
Subject: [PATCH 09/10] updated readRawData to use read_nwb if NWB files are
 found
---
GuPPy/readTevTsq.py | 38 ++++++-
GuPPy/runFiberPhotometryAnalysis.ipynb | 144 ++++++++-----------------
2 files changed, 79 insertions(+), 103 deletions(-)
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index 3ab19cd..8d47533 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -13,7 +13,7 @@
from numpy import int32, uint32, uint8, uint16, float64, int64, int32, float32
import multiprocessing as mp
from tqdm import tqdm
-from pprint import pprint
+from pathlib import Path
def insertLog(text, level):
file = os.path.join('.','..','guppy.log')
@@ -494,6 +494,8 @@ def readRawData(inputParametersPath):
with open(inputParametersPath) as f:
inputParameters = json.load(f)
+ nwb_response_series_names = inputParameters['nwb_response_series_names']
+ nwb_response_series_indices = inputParameters['nwb_response_series_indices']
folderNames = inputParameters['folderNames']
numProcesses = inputParameters['numberOfCores']
storesListPath = []
@@ -513,6 +515,8 @@ def readRawData(inputParametersPath):
step = 0
for i in range(len(folderNames)):
filepath = folderNames[i]
+ nwb_response_series_name = nwb_response_series_names[i]
+ indices = nwb_response_series_indices[i]
print(filepath)
insertLog(f"### Reading raw data for folder {folderNames[i]}", logging.DEBUG)
storesListPath = glob.glob(os.path.join(filepath, '*_output_*'))
@@ -523,6 +527,8 @@ def readRawData(inputParametersPath):
pass
else:
flag = check_doric(filepath)
+ if flag == 0: # doric file(s) not found
+ flag = check_nwb(filepath)
# read data corresponding to each storename selected by user while saving the storeslist file
for j in range(len(storesListPath)):
@@ -538,6 +544,9 @@ def readRawData(inputParametersPath):
execute_import_doric(filepath, storesList, flag, op)
elif flag=='doric_doric':
execute_import_doric(filepath, storesList, flag, op)
+ elif flag=='nwb':
+ filepath = Path(filepath)
+ read_nwb(filepath, op, nwb_response_series_name, indices)
else:
execute_import_csv(filepath, np.unique(storesList[0,:]), op, numProcesses)
@@ -548,19 +557,38 @@ def readRawData(inputParametersPath):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
+def check_nwb(filepath):
+ nwbfile_paths = glob.glob(os.path.join(filepath, '*.nwb'))
+ if len(nwbfile_paths) > 1:
+ insertLog('Two nwb files are present at the location.', logging.ERROR)
+ raise Exception('Two nwb files are present at the location.')
+ elif len(nwbfile_paths) == 0:
+ insertLog("\033[1m" + "NWB file not found." + "\033[0m", logging.ERROR)
+ print("\033[1m" + "NWB file not found." + "\033[0m")
+ return 0
+ else:
+ flag = 'nwb'
+ return flag
+
+
def read_nwb(filepath, outputPath, response_series_name, indices, npoints=128):
"""
    Read photometry data from an NWB file and save the output to an hdf5 file.
"""
from pynwb import NWBHDF5IO # Dynamic import is necessary since pynwb isn't available in the main environment (python 3.6)
- print(f"Reading all events {indices} from NWB file {filepath} to save to {outputPath}")
+ nwbfilepath = glob.glob(os.path.join(filepath, '*.nwb'))
+ if len(nwbfilepath)>1:
+ raise Exception('Two nwb files are present at the location.')
+ else:
+ nwbfilepath = nwbfilepath[0]
+ print(f"Reading all events {indices} from NWB file {nwbfilepath} to save to {outputPath}")
- with NWBHDF5IO(filepath, 'r') as io:
+ with NWBHDF5IO(nwbfilepath, 'r') as io:
nwbfile = io.read()
fiber_photometry_response_series = nwbfile.acquisition[response_series_name]
data = fiber_photometry_response_series.data[:]
- sampling_rate = getattr(fiber_photometry_response_series, 'rate', default=None)
- timestamps = getattr(fiber_photometry_response_series, 'timestamps', default=None)
+ sampling_rate = getattr(fiber_photometry_response_series, 'rate', None)
+ timestamps = getattr(fiber_photometry_response_series, 'timestamps', None)
if sampling_rate is None and timestamps is not None:
sampling_rate = 1 / np.median(np.diff(timestamps))
elif timestamps is None and sampling_rate is not None:
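
With this patch, readRawData() expects two additional keys in the input parameters JSON, one entry per folder in folderNames. A hypothetical fragment (all values are made up) illustrating the expected shape:

    # Each folder gets a response-series name plus the list of column
    # indices to extract from that series.
    inputParameters = {
        "folderNames": ["/data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04"],
        "numberOfCores": 2,
        "nwb_response_series_names": ["fiber_photometry_response_series"],
        "nwb_response_series_indices": [[0, 1]],
    }
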
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index 3de9d5f..621aadc 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -4,40 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Step 1: Import Python Packages"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "The autoreload extension is already loaded. To reload it, use:\n",
- " %reload_ext autoreload\n"
- ]
- }
- ],
- "source": [
- "%load_ext autoreload\n",
- "%autoreload 2\n",
- "\n",
- "%matplotlib tk\n",
- "import os\n",
- "import json\n",
- "from preprocess import extractTsAndSignal\n",
- "from computePsth import psthForEachStorename\n",
- "from findTransientsFreqAndAmp import executeFindFreqAndAmp"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Step 2: Input Parameters GUI\n",
+ "### Step 1: Input Parameters GUI\n",
"\n",
"a) Open a new terminal/anaconda window and navigate to location of code by entering 'cd path_to_code'\n",
"
Example: 'cd Desktop/GuPPy-main/'
\n",
@@ -50,9 +17,16 @@
"f) Do not close GUI browser window for input parameters file saving. To make changes to parameters, simply hit \"Save\" and continue to Step 4. "
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For steps 1-3, please use the conda environment defined by guppy_read_env.yaml, especially if using nwb files."
+ ]
+ },
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -63,7 +37,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Step 3: Storenames GUI \n",
+ "### Step 2: Storenames GUI \n",
"\n",
"a) Click Storenames GUI icon
\n",
"b) Select desired storenames to be analyzed
\n",
@@ -78,12 +52,12 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Step 4: Read Raw Data"
+ "### Step 3: Read Raw Data"
]
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 2,
"metadata": {},
"outputs": [
{
@@ -91,42 +65,14 @@
"output_type": "stream",
"text": [
"### Reading raw data... ###\n",
- "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/Photo_112_283-190620-093542\n",
+ "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04\n",
"Trying to read tsq file.\n",
- "Data from tsq file fetched.\n",
- "Reading data for event Dv1A ...\n",
- "In readtev(), type(S['sampling_rate']) = \n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "100%|██████████| 29389/29389 [05:47<00:00, 84.64it/s] \n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data for event Dv1A fetched and stored.\n",
- "Reading data for event Dv2A ...\n",
- "In readtev(), type(S['sampling_rate']) = \n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "100%|██████████| 29389/29389 [09:31<00:00, 51.38it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data for event Dv2A fetched and stored.\n",
- "Time taken = 920.97965\n",
+ "\u001b[1mtsq file not found.\u001b[1m\n",
+ "Checking if doric file exists.\n",
+ "\u001b[1mDoric file not found.\u001b[1m\n",
+ "Reading all events [0, 1] from NWB file /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb to save to /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output_0\n",
+ "Data for event event_0 fetched and stored.\n",
+ "Data for event event_1 fetched and stored.\n",
"### Raw data fetched and saved.\n"
]
}
@@ -136,35 +82,37 @@
"readRawData(inputParametersPath)"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Step 4: Import Python Packages"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For steps 4-8, please use the main guppy environment installed from the spec_file appropriate for your OS."
+ ]
+ },
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Reading all events [0, 1] from NWB file /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb to save to /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output_0\n",
- "Data for event event_0 fetched and stored.\n",
- "Data for event event_1 fetched and stored.\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
- "from readTevTsq import read_nwb\n",
+ "%load_ext autoreload\n",
+ "%autoreload 2\n",
+ "\n",
+ "%matplotlib tk\n",
+ "import os\n",
"import json\n",
- "from pathlib import Path\n",
- "inputParametersPath = \"/Users/pauladkisson/GuPPyParamtersUsed.json\"\n",
+ "from preprocess import extractTsAndSignal\n",
+ "from computePsth import psthForEachStorename\n",
+ "from findTransientsFreqAndAmp import executeFindFreqAndAmp\n",
"\n",
- "with open(inputParametersPath) as f:\n",
- " inputParameters = json.load(f)\n",
- "folder_path = Path(inputParameters['folderNames'][0])\n",
- "nwbfile_name = folder_path.name + \".nwb\"\n",
- "filepath = folder_path / nwbfile_name\n",
- "outputPath = \"/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output_0\"\n",
- "indices = [0, 1]\n",
- "read_nwb(filepath, outputPath, indices)"
+ "inputParametersPath = \"/Users/pauladkisson/GuPPyParamtersUsed.json\""
]
},
{
@@ -177,7 +125,7 @@
},
{
"cell_type": "code",
- "execution_count": 14,
+ "execution_count": 3,
"metadata": {},
"outputs": [
{
@@ -227,7 +175,7 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
From a935f4c09b51f52140eaec9a59b036947ddad5f7 Mon Sep 17 00:00:00 2001
From: pauladkisson
Date: Mon, 15 Jul 2024 11:27:44 -0700
Subject: [PATCH 10/10] added documentation
---
GuPPy/readTevTsq.py | 41 +++++++++++++++++++++++++++++++++++++++--
1 file changed, 39 insertions(+), 2 deletions(-)
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index 8d47533..5268018 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -14,6 +14,7 @@
import multiprocessing as mp
from tqdm import tqdm
from pathlib import Path
+from typing import List
def insertLog(text, level):
file = os.path.join('.','..','guppy.log')
@@ -557,7 +558,25 @@ def readRawData(inputParametersPath):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
-def check_nwb(filepath):
+def check_nwb(filepath: str):
+ """
+ Check if an NWB file is present at the given location.
+
+ Parameters
+ ----------
+ filepath : str
+ Path to the folder containing the NWB file.
+
+ Returns
+ -------
+    flag : str or int
+        Flag indicating the presence of an NWB file: 'nwb' if exactly one NWB file is found, 0 if none is found.
+
+ Raises
+ ------
+ Exception
+        If more than one NWB file is present at the location.
+ """
nwbfile_paths = glob.glob(os.path.join(filepath, '*.nwb'))
if len(nwbfile_paths) > 1:
insertLog('Two nwb files are present at the location.', logging.ERROR)
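
An assumed usage of check_nwb() consistent with the docstring above (the path is illustrative):

    from readTevTsq import check_nwb

    # Route a session folder based on whether it contains a single NWB file.
    flag = check_nwb("/data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04")
    if flag == 'nwb':
        print("NWB file detected; readRawData() dispatches to read_nwb().")
    else:
        print("No NWB file found; other readers are tried.")  # flag == 0
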
@@ -571,9 +590,27 @@ def check_nwb(filepath):
return flag
-def read_nwb(filepath, outputPath, response_series_name, indices, npoints=128):
+def read_nwb(filepath: str, outputPath: str, response_series_name: str, indices: List[int], npoints: int = 128):
"""
    Read photometry data from an NWB file and save the output to an hdf5 file.
+
+ Parameters
+ ----------
+ filepath : str
+ Path to the folder containing the NWB file.
+ outputPath : str
+ Path to the folder where the output data will be saved.
+ response_series_name : str
+ Name of the response series in the NWB file.
+ indices : List[int]
+ List of indices of the response series to be read.
+ npoints : int, optional
+ Number of points for each chunk. Timestamps are only saved for the first point in each chunk. Default is 128.
+
+ Raises
+ ------
+ Exception
+        If more than one NWB file is present at the location.
"""
from pynwb import NWBHDF5IO # Dynamic import is necessary since pynwb isn't available in the main environment (python 3.6)
nwbfilepath = glob.glob(os.path.join(filepath, '*.nwb'))