diff --git a/GuPPy/computePsth.py b/GuPPy/computePsth.py
index 3585738..1495f23 100755
--- a/GuPPy/computePsth.py
+++ b/GuPPy/computePsth.py
@@ -657,12 +657,14 @@ def averageForGroup(folderNames, event, inputParameters):
print("Group of data averaged.")
-def psthForEachStorename(inputParameters):
+def psthForEachStorename(inputParametersPath):
print("Computing PSTH, Peak and Area for each event...")
-
- inputParameters = inputParameters
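+    # parameters now arrive as a path to a JSON file rather than as an in-memory dict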
+ with open(inputParametersPath) as f:
+ inputParameters = json.load(f)
+
#storesList = np.genfromtxt(inputParameters['storesListPath'], dtype='str', delimiter=',')
@@ -772,17 +774,17 @@ def psthForEachStorename(inputParameters):
print("PSTH, Area and Peak are computed for all events.")
return inputParameters
-if __name__ == "__main__":
- try:
- inputParameters = psthForEachStorename(json.loads(sys.argv[1]))
- subprocess.call(["python",
- os.path.join(inputParameters["curr_dir"],"GuPPy","findTransientsFreqAndAmp.py"),
- json.dumps(inputParameters)])
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(str(e), logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# try:
+# inputParameters = psthForEachStorename(json.loads(sys.argv[1]))
+# subprocess.call(["python",
+# os.path.join(inputParameters["curr_dir"],"GuPPy","findTransientsFreqAndAmp.py"),
+# json.dumps(inputParameters)])
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(str(e), logging.ERROR)
+# raise e
diff --git a/GuPPy/findTransientsFreqAndAmp.py b/GuPPy/findTransientsFreqAndAmp.py
index 7cbab39..9fc12f0 100755
--- a/GuPPy/findTransientsFreqAndAmp.py
+++ b/GuPPy/findTransientsFreqAndAmp.py
@@ -185,7 +185,7 @@ def visuzlize_peaks(filepath, z_score, timestamps, peaksIndex):
timestamps[peaksIndex], z_score[peaksIndex], 'o')
ax.set_title(basename)
fig.suptitle(os.path.basename(dirname))
- #plt.show()
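+    # show each transient-detection figure as it is built; the batch-level
+    # plt.show() calls in executeFindFreqAndAmp are disabled below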
+ plt.show()
def findFreqAndAmp(filepath, inputParameters, window=15, numProcesses=mp.cpu_count()):
@@ -354,7 +354,7 @@ def executeFindFreqAndAmp(inputParameters):
findFreqAndAmp(filepath, inputParameters, window=moving_window, numProcesses=numProcesses)
writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
inputParameters['step'] += 1
- plt.show()
+ #plt.show()
else:
for i in range(len(folderNames)):
insertLog(f"Finding transients in z-score data of {folderNames[i]} and calculating frequency and amplitude.",
@@ -368,19 +368,19 @@ def executeFindFreqAndAmp(inputParameters):
writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
inputParameters['step'] += 1
insertLog('Transients in z-score data found and frequency and amplitude are calculated.', logging.INFO)
- plt.show()
+ #plt.show()
print('Transients in z-score data found and frequency and amplitude are calculated.')
-if __name__ == "__main__":
- try:
- executeFindFreqAndAmp(json.loads(sys.argv[1]))
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(str(e), logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# try:
+# executeFindFreqAndAmp(json.loads(sys.argv[1]))
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(str(e), logging.ERROR)
+# raise e
diff --git a/GuPPy/preprocess.py b/GuPPy/preprocess.py
index 916d114..ab5bf55 100755
--- a/GuPPy/preprocess.py
+++ b/GuPPy/preprocess.py
@@ -16,7 +16,7 @@
import matplotlib.pyplot as plt
from matplotlib.widgets import MultiCursor
from combineDataFn import processTimestampsForCombiningData
-plt.switch_backend('TKAgg')
+#plt.switch_backend('TKAgg')
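+# backend selection is now left to the calling environment instead of forcing TkAgg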
def takeOnlyDirs(paths):
removePaths = []
@@ -176,8 +176,9 @@ def add_control_channel(filepath, arr):
return arr
# check if dealing with TDT files or csv files
+# NWB files are treated like TDT files
def check_TDT(filepath):
- path = glob.glob(os.path.join(filepath, '*.tsq'))
+ path = glob.glob(os.path.join(filepath, '*.tsq')) + glob.glob(os.path.join(filepath, '*.nwb'))
if len(path)>0:
return True
else:
@@ -458,7 +459,7 @@ def visualize_z_score(filepath):
ax.plot(x,y)
ax.set_title(basename)
fig.suptitle(name)
- #plt.show()
+ plt.show()
# function to plot deltaF/F
def visualize_dff(filepath):
@@ -478,7 +479,7 @@ def visualize_dff(filepath):
ax.plot(x,y)
ax.set_title(basename)
fig.suptitle(name)
- #plt.show()
+ plt.show()
@@ -574,7 +575,7 @@ def plt_close_event(event):
cid = fig.canvas.mpl_connect('close_event', plt_close_event)
#multi = MultiCursor(fig.canvas, (ax1, ax2), color='g', lw=1, horizOn=False, vertOn=True)
- #plt.show()
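+    # show the figure from inside the visualization helper; the batch-level
+    # plt.show() in execute_zscore is disabled instead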
+ plt.show()
#return fig
# function to plot control and signal, also provide a feature to select chunks for artifacts removal
@@ -1176,16 +1177,18 @@ def execute_zscore(folderNames, inputParameters):
writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
inputParameters['step'] += 1
- plt.show()
+ #plt.show()
insertLog("Signal data and event timestamps are extracted.", logging.INFO)
print("Signal data and event timestamps are extracted.")
-def extractTsAndSignal(inputParameters):
+def extractTsAndSignal(inputParametersPath):
print("Extracting signal data and event timestamps...")
insertLog("Extracting signal data and event timestamps", logging.DEBUG)
- inputParameters = inputParameters
+
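+    # load the input parameters from the JSON file at the given path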
+ with open(inputParametersPath) as f:
+ inputParameters = json.load(f)
#storesList = np.genfromtxt(inputParameters['storesListPath'], dtype='str', delimiter=',')
@@ -1224,15 +1227,15 @@ def extractTsAndSignal(inputParameters):
-if __name__ == "__main__":
- try:
- extractTsAndSignal(json.loads(sys.argv[1]))
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(str(e), logging.ERROR)
- raise e
+# if __name__ == "__main__":
+# try:
+# extractTsAndSignal(json.loads(sys.argv[1]))
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(str(e), logging.ERROR)
+# raise e
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index e5defe2..6e7631b 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -12,6 +12,9 @@
import pandas as pd
from numpy import int32, uint32, uint8, uint16, float64, int64, int32, float32
import multiprocessing as mp
+from tqdm import tqdm
+from pathlib import Path
+from typing import List
def takeOnlyDirs(paths):
removePaths = []
@@ -329,7 +332,7 @@ def readtev(data, filepath, event, outputPath):
if formatNew != 5:
nsample = (data_size[first_row,]-10)*int(table[formatNew, 2])
S['data'] = np.zeros((len(fp_loc), nsample))
- for i in range(0, len(fp_loc)):
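+        # tqdm displays a progress bar while each stored block is read from the .tev file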
+ for i in tqdm(range(0, len(fp_loc))):
with open(tevfilepath, 'rb') as fp:
fp.seek(fp_loc[i], os.SEEK_SET)
S['data'][i,:] = np.fromfile(fp, dtype=table[formatNew, 3], count=nsample).reshape(1, nsample, order='F')
@@ -489,13 +492,17 @@ def execute_import_doric(filepath, storesList, flag, outputPath):
# function to read data from 'tsq' and 'tev' files
-def readRawData(inputParameters):
+def readRawData(inputParametersPath):
print('### Reading raw data... ###')
insertLog('### Reading raw data... ###', logging.DEBUG)
# get input parameters
- inputParameters = inputParameters
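+    # read parameters from the JSON file path; the sys.argv entry point is commented out below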
+ with open(inputParametersPath) as f:
+ inputParameters = json.load(f)
+
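+    # per-folder NWB settings: one response-series name and one list of channel indices per folder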
+ nwb_response_series_names = inputParameters['nwb_response_series_names']
+ nwb_response_series_indices = inputParameters['nwb_response_series_indices']
folderNames = inputParameters['folderNames']
numProcesses = inputParameters['numberOfCores']
storesListPath = []
@@ -515,6 +522,8 @@ def readRawData(inputParameters):
step = 0
for i in range(len(folderNames)):
filepath = folderNames[i]
+ nwb_response_series_name = nwb_response_series_names[i]
+ indices = nwb_response_series_indices[i]
print(filepath)
insertLog(f"### Reading raw data for folder {folderNames[i]}", logging.DEBUG)
storesListPath = takeOnlyDirs(glob.glob(os.path.join(filepath, '*_output_*')))
@@ -525,6 +534,8 @@ def readRawData(inputParameters):
pass
else:
flag = check_doric(filepath)
+ if flag == 0: # doric file(s) not found
+ flag = check_nwb(filepath)
# read data corresponding to each storename selected by user while saving the storeslist file
for j in range(len(storesListPath)):
@@ -540,6 +551,9 @@ def readRawData(inputParameters):
execute_import_doric(filepath, storesList, flag, op)
elif flag=='doric_doric':
execute_import_doric(filepath, storesList, flag, op)
+ elif flag=='nwb':
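+                # NWB data is handed to the pynwb-based reader defined below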
+ filepath = Path(filepath)
+ read_nwb(filepath, op, nwb_response_series_name, indices)
else:
execute_import_csv(filepath, np.unique(storesList[0,:]), op, numProcesses)
@@ -550,14 +564,105 @@ def readRawData(inputParameters):
insertLog('Raw data fetched and saved.', logging.INFO)
insertLog("#" * 400, logging.INFO)
-if __name__ == "__main__":
- print('run')
- try:
- readRawData(json.loads(sys.argv[1]))
- insertLog('#'*400, logging.INFO)
- except Exception as e:
- with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
- file.write(str(-1)+"\n")
- insertLog(f"An error occurred: {e}", logging.ERROR)
- raise e
+def check_nwb(filepath: str):
+ """
+ Check if an NWB file is present at the given location.
+
+ Parameters
+ ----------
+ filepath : str
+ Path to the folder containing the NWB file.
+
+ Returns
+ -------
+    flag : str or int
+        'nwb' if exactly one NWB file is present at the location, 0 otherwise.
+
+ Raises
+ ------
+ Exception
+        If more than one NWB file is present at the location.
+ """
+ nwbfile_paths = glob.glob(os.path.join(filepath, '*.nwb'))
+ if len(nwbfile_paths) > 1:
+        insertLog('Multiple nwb files are present at the location.', logging.ERROR)
+        raise Exception('Multiple nwb files are present at the location.')
+ elif len(nwbfile_paths) == 0:
+ insertLog("\033[1m" + "NWB file not found." + "\033[0m", logging.ERROR)
+ print("\033[1m" + "NWB file not found." + "\033[0m")
+ return 0
+ else:
+ flag = 'nwb'
+ return flag
+
+
+def read_nwb(filepath: str, outputPath: str, response_series_name: str, indices: List[int], npoints: int = 128):
+ """
+ Read photometry data from an NWB file and save the output to a hdf5 file.
+
+ Parameters
+ ----------
+ filepath : str
+ Path to the folder containing the NWB file.
+ outputPath : str
+ Path to the folder where the output data will be saved.
+ response_series_name : str
+ Name of the response series in the NWB file.
+ indices : List[int]
+ List of indices of the response series to be read.
+ npoints : int, optional
+ Number of points for each chunk. Timestamps are only saved for the first point in each chunk. Default is 128.
+
+ Raises
+ ------
+ Exception
+        If more than one NWB file is present at the location.
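+
+    Examples
+    --------
+    Illustrative call; the folder, output path and series name are placeholders:
+
+    >>> read_nwb(Path('/data/session1'), '/data/session1/session1_output_1',
+    ...          'FiberPhotometryResponseSeries', indices=[0, 1])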
+ """
+    from pynwb import NWBHDF5IO  # lazy import: pynwb isn't available in the default environment (Python 3.6), so a module-level import would break non-NWB workflows
+ nwbfilepath = glob.glob(os.path.join(filepath, '*.nwb'))
+    if len(nwbfilepath) > 1:
+        raise Exception('Multiple nwb files are present at the location.')
+ else:
+ nwbfilepath = nwbfilepath[0]
+ print(f"Reading all events {indices} from NWB file {nwbfilepath} to save to {outputPath}")
+
+ with NWBHDF5IO(nwbfilepath, 'r') as io:
+ nwbfile = io.read()
+ fiber_photometry_response_series = nwbfile.acquisition[response_series_name]
+ data = fiber_photometry_response_series.data[:]
+ sampling_rate = getattr(fiber_photometry_response_series, 'rate', None)
+ timestamps = getattr(fiber_photometry_response_series, 'timestamps', None)
+        if sampling_rate is None and timestamps is not None:
+            sampling_rate = 1 / np.median(np.diff(timestamps))
+        elif timestamps is None and sampling_rate is not None:
+            timestamps = np.arange(0, data.shape[0]) / sampling_rate
+        elif sampling_rate is None and timestamps is None:
+            # only fail when the series carries neither a rate nor explicit timestamps
+            raise Exception(f"Fiber photometry response series {response_series_name} must have rate or timestamps.")
+
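+        # package each selected channel as a separate "event" dict, mirroring the
+        # dict layout used by the TDT reader so downstream steps stay unchanged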
+ for index in indices:
+ event = f'event_{index}'
+ S = {}
+ S['storename'] = str(event)
+ S['sampling_rate'] = sampling_rate
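+            # timestamps are kept once per chunk of npoints samples (see the docstring)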
+ S['timestamps'] = timestamps[::npoints]
+ S['data'] = data[:, index]
+ S['npoints'] = npoints
+ S['channels'] = np.ones_like(S['timestamps'])
+
+ save_dict_to_hdf5(S, event, outputPath)
+ check_data(S, filepath, event, outputPath)
+ print("Data for event {} fetched and stored.".format(event))
+ insertLog("Data for event {} fetched and stored.".format(event), logging.INFO)
+
+
+# if __name__ == "__main__":
+# print('run')
+# try:
+# readRawData(json.loads(sys.argv[1]))
+# insertLog('#'*400, logging.INFO)
+# except Exception as e:
+# with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+# file.write(str(-1)+"\n")
+# insertLog(f"An error occurred: {e}", logging.ERROR)
+# raise e
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index ef5828c..621aadc 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -4,36 +4,12 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Step 1: Import Python Packages"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "%load_ext autoreload\n",
- "%autoreload 2\n",
- "\n",
- "%matplotlib\n",
- "import os\n",
- "from readTevTsq import readRawData\n",
- "from preprocess import extractTsAndSignal\n",
- "from computePsth import psthForEachStorename\n",
- "from findTransientsFreqAndAmp import executeFindFreqAndAmp"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Step 2: Input Parameters GUI\n",
+ "### Step 1: Input Parameters GUI\n",
"\n",
"a) Open a new terminal/anaconda window and navigate to location of code by entering 'cd path_to_code'\n",
- "
Example: 'cd Desktop/GuPPy-main/GuPPy'
\n",
+ "
Example: 'cd Desktop/GuPPy-main/'
\n",
"b) Execute the following command to open GUI\n",
- "
panel serve --show savingInputParameters.ipynb\n", + "
panel serve --show GuPPy/savingInputParameters.ipynb\n", "c) Navigate to data location (using down arrow) and select one or more folders to analyze