diff --git a/GuPPy/computePsth.py b/GuPPy/computePsth.py
index 3585738..1495f23 100755
--- a/GuPPy/computePsth.py
+++ b/GuPPy/computePsth.py
@@ -657,12 +657,12 @@ def averageForGroup(folderNames, event, inputParameters):
 
     print("Group of data averaged.")
 
-def psthForEachStorename(inputParameters):
+def psthForEachStorename(inputParametersPath):
 
     print("Computing PSTH, Peak and Area for each event...")
-    
-    inputParameters = inputParameters
+    with open(inputParametersPath) as f:
+        inputParameters = json.load(f)
 
     #storesList = np.genfromtxt(inputParameters['storesListPath'], dtype='str', delimiter=',')
@@ -772,17 +772,17 @@ def psthForEachStorename(inputParameters):
 
     print("PSTH, Area and Peak are computed for all events.")
     return inputParameters
 
-if __name__ == "__main__":
-    try:
-        inputParameters = psthForEachStorename(json.loads(sys.argv[1]))
-        subprocess.call(["python",
-                         os.path.join(inputParameters["curr_dir"],"GuPPy","findTransientsFreqAndAmp.py"),
-                         json.dumps(inputParameters)])
-        insertLog('#'*400, logging.INFO)
-    except Exception as e:
-        with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
-            file.write(str(-1)+"\n")
-        insertLog(str(e), logging.ERROR)
-        raise e
+# if __name__ == "__main__":
+#     try:
+#         inputParameters = psthForEachStorename(json.loads(sys.argv[1]))
+#         subprocess.call(["python",
+#                          os.path.join(inputParameters["curr_dir"],"GuPPy","findTransientsFreqAndAmp.py"),
+#                          json.dumps(inputParameters)])
+#         insertLog('#'*400, logging.INFO)
+#     except Exception as e:
+#         with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+#             file.write(str(-1)+"\n")
+#         insertLog(str(e), logging.ERROR)
+#         raise e
diff --git a/GuPPy/findTransientsFreqAndAmp.py b/GuPPy/findTransientsFreqAndAmp.py
index 7cbab39..9fc12f0 100755
--- a/GuPPy/findTransientsFreqAndAmp.py
+++ b/GuPPy/findTransientsFreqAndAmp.py
@@ -185,7 +185,7 @@ def visuzlize_peaks(filepath, z_score, timestamps, peaksIndex):
             timestamps[peaksIndex], z_score[peaksIndex], 'o')
     ax.set_title(basename)
     fig.suptitle(os.path.basename(dirname))
-    #plt.show()
+    plt.show()
 
 
 def findFreqAndAmp(filepath, inputParameters, window=15, numProcesses=mp.cpu_count()):
@@ -354,7 +354,7 @@ def executeFindFreqAndAmp(inputParameters):
             findFreqAndAmp(filepath, inputParameters, window=moving_window, numProcesses=numProcesses)
             writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
             inputParameters['step'] += 1
-        plt.show()
+        #plt.show()
     else:
         for i in range(len(folderNames)):
             insertLog(f"Finding transients in z-score data of {folderNames[i]} and calculating frequency and amplitude.",
@@ -368,19 +368,19 @@ def executeFindFreqAndAmp(inputParameters):
             writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
             inputParameters['step'] += 1
     insertLog('Transients in z-score data found and frequency and amplitude are calculated.', logging.INFO)
-    plt.show()
+    #plt.show()
     print('Transients in z-score data found and frequency and amplitude are calculated.')
 
-if __name__ == "__main__":
-    try:
-        executeFindFreqAndAmp(json.loads(sys.argv[1]))
-        insertLog('#'*400, logging.INFO)
-    except Exception as e:
-        with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
-            file.write(str(-1)+"\n")
-        insertLog(str(e), logging.ERROR)
-        raise e
+# if __name__ == "__main__":
+#     try:
+#         executeFindFreqAndAmp(json.loads(sys.argv[1]))
+#         insertLog('#'*400, logging.INFO)
+#     except Exception as e:
+#         with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+#             file.write(str(-1)+"\n")
+#         insertLog(str(e), logging.ERROR)
+#         raise e
diff --git a/GuPPy/preprocess.py b/GuPPy/preprocess.py
index 916d114..ab5bf55 100755
--- a/GuPPy/preprocess.py
+++ b/GuPPy/preprocess.py
@@ -16,7 +16,7 @@ import matplotlib.pyplot as plt
 from matplotlib.widgets import MultiCursor
 from combineDataFn import processTimestampsForCombiningData
 
-plt.switch_backend('TKAgg')
+#plt.switch_backend('TKAgg')
 
 def takeOnlyDirs(paths):
     removePaths = []
@@ -176,8 +176,9 @@ def add_control_channel(filepath, arr):
         return arr
 
 # check if dealing with TDT files or csv files
+# NWB files are treated like TDT files
 def check_TDT(filepath):
-    path = glob.glob(os.path.join(filepath, '*.tsq'))
+    path = glob.glob(os.path.join(filepath, '*.tsq')) + glob.glob(os.path.join(filepath, '*.nwb'))
     if len(path)>0:
         return True
     else:
@@ -458,7 +459,7 @@ def visualize_z_score(filepath):
         ax.plot(x,y)
         ax.set_title(basename)
     fig.suptitle(name)
-    #plt.show()
+    plt.show()
 
 # function to plot deltaF/F
 def visualize_dff(filepath):
@@ -478,7 +479,7 @@ def visualize_dff(filepath):
         ax.plot(x,y)
         ax.set_title(basename)
     fig.suptitle(name)
-    #plt.show()
+    plt.show()
 
 
@@ -574,7 +575,7 @@ def plt_close_event(event):
 
     cid = fig.canvas.mpl_connect('close_event', plt_close_event)
     #multi = MultiCursor(fig.canvas, (ax1, ax2), color='g', lw=1, horizOn=False, vertOn=True)
-    #plt.show()
+    plt.show()
    #return fig
 
 # function to plot control and signal, also provide a feature to select chunks for artifacts removal
@@ -1176,16 +1177,18 @@ def execute_zscore(folderNames, inputParameters):
             writeToFile(str(10+((inputParameters['step']+1)*10))+'\n')
             inputParameters['step'] += 1
 
-    plt.show()
+    #plt.show()
     insertLog("Signal data and event timestamps are extracted.", logging.INFO)
     print("Signal data and event timestamps are extracted.")
 
-def extractTsAndSignal(inputParameters):
+def extractTsAndSignal(inputParametersPath):
 
     print("Extracting signal data and event timestamps...")
     insertLog("Extracting signal data and event timestamps", logging.DEBUG)
-    inputParameters = inputParameters
+
+    with open(inputParametersPath) as f:
+        inputParameters = json.load(f)
 
     #storesList = np.genfromtxt(inputParameters['storesListPath'], dtype='str', delimiter=',')
@@ -1224,15 +1227,15 @@ def extractTsAndSignal(inputParameters):
 
 
-if __name__ == "__main__":
-    try:
-        extractTsAndSignal(json.loads(sys.argv[1]))
-        insertLog('#'*400, logging.INFO)
-    except Exception as e:
-        with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
-            file.write(str(-1)+"\n")
-        insertLog(str(e), logging.ERROR)
-        raise e
+# if __name__ == "__main__":
+#     try:
+#         extractTsAndSignal(json.loads(sys.argv[1]))
+#         insertLog('#'*400, logging.INFO)
+#     except Exception as e:
+#         with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+#             file.write(str(-1)+"\n")
+#         insertLog(str(e), logging.ERROR)
+#         raise e
diff --git a/GuPPy/readTevTsq.py b/GuPPy/readTevTsq.py
index e5defe2..6e7631b 100755
--- a/GuPPy/readTevTsq.py
+++ b/GuPPy/readTevTsq.py
@@ -12,6 +12,9 @@ import pandas as pd
 from numpy import int32, uint32, uint8, uint16, float64, int64, int32, float32
 import multiprocessing as mp
 
+from tqdm import tqdm
+from pathlib import Path
+from typing import List
 
 def takeOnlyDirs(paths):
     removePaths = []
@@ -329,7 +332,7 @@ def readtev(data, filepath, event, outputPath):
     if formatNew != 5:
         nsample = (data_size[first_row,]-10)*int(table[formatNew, 2])
         S['data'] = np.zeros((len(fp_loc), nsample))
-        for i in range(0, len(fp_loc)):
+        for i in tqdm(range(0, len(fp_loc))):
             with open(tevfilepath, 'rb') as fp:
                 fp.seek(fp_loc[i], os.SEEK_SET)
                 S['data'][i,:] = np.fromfile(fp, dtype=table[formatNew, 3], count=nsample).reshape(1, nsample, order='F')
@@ -489,13 +492,17 @@ def execute_import_doric(filepath, storesList, flag, outputPath):
 
 
 # function to read data from 'tsq' and 'tev' files
-def readRawData(inputParameters):
+def readRawData(inputParametersPath):
 
     print('### Reading raw data... ###')
     insertLog('### Reading raw data... ###', logging.DEBUG)
 
     # get input parameters
-    inputParameters = inputParameters
+    with open(inputParametersPath) as f:
+        inputParameters = json.load(f)
+
+    nwb_response_series_names = inputParameters['nwb_response_series_names']
+    nwb_response_series_indices = inputParameters['nwb_response_series_indices']
     folderNames = inputParameters['folderNames']
     numProcesses = inputParameters['numberOfCores']
     storesListPath = []
@@ -515,6 +522,8 @@ def readRawData(inputParameters):
     step = 0
     for i in range(len(folderNames)):
         filepath = folderNames[i]
+        nwb_response_series_name = nwb_response_series_names[i]
+        indices = nwb_response_series_indices[i]
         print(filepath)
         insertLog(f"### Reading raw data for folder {folderNames[i]}", logging.DEBUG)
         storesListPath = takeOnlyDirs(glob.glob(os.path.join(filepath, '*_output_*')))
@@ -525,6 +534,8 @@ def readRawData(inputParameters):
             pass
         else:
             flag = check_doric(filepath)
+            if flag == 0:  # doric file(s) not found
+                flag = check_nwb(filepath)
 
         # read data corresponding to each storename selected by user while saving the storeslist file
         for j in range(len(storesListPath)):
@@ -540,6 +551,9 @@ def readRawData(inputParameters):
                 execute_import_doric(filepath, storesList, flag, op)
             elif flag=='doric_doric':
                 execute_import_doric(filepath, storesList, flag, op)
+            elif flag=='nwb':
+                filepath = Path(filepath)
+                read_nwb(filepath, op, nwb_response_series_name, indices)
             else:
                 execute_import_csv(filepath, np.unique(storesList[0,:]), op, numProcesses)
 
@@ -550,14 +564,105 @@ def readRawData(inputParameters):
     insertLog('Raw data fetched and saved.', logging.INFO)
     insertLog("#" * 400, logging.INFO)
 
-if __name__ == "__main__":
-    print('run')
-    try:
-        readRawData(json.loads(sys.argv[1]))
-        insertLog('#'*400, logging.INFO)
-    except Exception as e:
-        with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
-            file.write(str(-1)+"\n")
-        insertLog(f"An error occurred: {e}", logging.ERROR)
-        raise e
+def check_nwb(filepath: str):
+    """
+    Check if an NWB file is present at the given location.
+
+    Parameters
+    ----------
+    filepath : str
+        Path to the folder containing the NWB file.
+
+    Returns
+    -------
+    flag : str
+        Flag indicating the presence of an NWB file. If present, the flag is set to 'nwb'.
+        If not present, the flag is set to 0.
+
+    Raises
+    ------
+    Exception
+        If more than one NWB file is present at the location.
+    """
+    nwbfile_paths = glob.glob(os.path.join(filepath, '*.nwb'))
+    if len(nwbfile_paths) > 1:
+        insertLog('More than one NWB file is present at the location.', logging.ERROR)
+        raise Exception('More than one NWB file is present at the location.')
+    elif len(nwbfile_paths) == 0:
+        insertLog("\033[1m" + "NWB file not found." + "\033[0m", logging.ERROR)
+        print("\033[1m" + "NWB file not found." + "\033[0m")
+        return 0
+    else:
+        flag = 'nwb'
+        return flag
+
+
+def read_nwb(filepath: str, outputPath: str, response_series_name: str, indices: List[int], npoints: int = 128):
+    """
+    Read photometry data from an NWB file and save the output to an HDF5 file.
+
+    Parameters
+    ----------
+    filepath : str
+        Path to the folder containing the NWB file.
+    outputPath : str
+        Path to the folder where the output data will be saved.
+    response_series_name : str
+        Name of the response series in the NWB file.
+    indices : List[int]
+        List of indices of the response series to be read.
+    npoints : int, optional
+        Number of points for each chunk. Timestamps are only saved for the first point
+        in each chunk. Default is 128.
+
+    Raises
+    ------
+    Exception
+        If more than one NWB file is present at the location.
+    """
+    from pynwb import NWBHDF5IO  # Dynamic import is necessary since pynwb isn't available in the main environment (python 3.6)
+    nwbfilepath = glob.glob(os.path.join(filepath, '*.nwb'))
+    if len(nwbfilepath)>1:
+        raise Exception('More than one NWB file is present at the location.')
+    else:
+        nwbfilepath = nwbfilepath[0]
+    print(f"Reading all events {indices} from NWB file {nwbfilepath} to save to {outputPath}")
+
+    with NWBHDF5IO(nwbfilepath, 'r') as io:
+        nwbfile = io.read()
+        fiber_photometry_response_series = nwbfile.acquisition[response_series_name]
+        data = fiber_photometry_response_series.data[:]
+        sampling_rate = getattr(fiber_photometry_response_series, 'rate', None)
+        timestamps = getattr(fiber_photometry_response_series, 'timestamps', None)
+        if sampling_rate is None and timestamps is None:
+            raise Exception(f"Fiber photometry response series {response_series_name} must have rate or timestamps.")
+        elif sampling_rate is None:
+            sampling_rate = 1 / np.median(np.diff(timestamps))
+        elif timestamps is None:
+            timestamps = np.arange(0, data.shape[0]) / sampling_rate
+
+        for index in indices:
+            event = f'event_{index}'
+            S = {}
+            S['storename'] = str(event)
+            S['sampling_rate'] = sampling_rate
+            S['timestamps'] = timestamps[::npoints]
+            S['data'] = data[:, index]
+            S['npoints'] = npoints
+            S['channels'] = np.ones_like(S['timestamps'])
+
+            save_dict_to_hdf5(S, event, outputPath)
+            check_data(S, filepath, event, outputPath)
+            print("Data for event {} fetched and stored.".format(event))
+            insertLog("Data for event {} fetched and stored.".format(event), logging.INFO)
+
+
+# if __name__ == "__main__":
+#     print('run')
+#     try:
+#         readRawData(json.loads(sys.argv[1]))
+#         insertLog('#'*400, logging.INFO)
+#     except Exception as e:
+#         with open(os.path.join(os.path.expanduser('~'), 'pbSteps.txt'), 'a') as file:
+#             file.write(str(-1)+"\n")
+#         insertLog(f"An error occurred: {e}", logging.ERROR)
+#         raise e
diff --git a/GuPPy/runFiberPhotometryAnalysis.ipynb b/GuPPy/runFiberPhotometryAnalysis.ipynb
index ef5828c..621aadc 100755
--- a/GuPPy/runFiberPhotometryAnalysis.ipynb
+++ b/GuPPy/runFiberPhotometryAnalysis.ipynb
@@ -4,36 +4,12 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Step 1: Import Python Packages"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "%load_ext autoreload\n",
-    "%autoreload 2\n",
-    "\n",
-    "%matplotlib\n",
-    "import os\n",
-    "from readTevTsq import readRawData\n",
-    "from preprocess import extractTsAndSignal\n",
-    "from computePsth import psthForEachStorename\n",
-    "from findTransientsFreqAndAmp import executeFindFreqAndAmp"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Step 2: Input Parameters GUI\n",
+    "### Step 1: Input Parameters GUI\n",
     "\n",
     "a) Open a new terminal/anaconda window and navigate to location of code by entering 'cd path_to_code'\n",
-    "
Example: 'cd Desktop/GuPPy-main/GuPPy'
\n",
+    "
Example: 'cd Desktop/GuPPy-main/'
\n",
     "b) Execute the following command to open GUI\n",
-    "
panel serve --show savingInputParameters.ipynb
\n",
+    "
panel serve --show GuPPy/savingInputParameters.ipynb
\n",
     "c) Navigate to data location (using down arrow) and select one or more folders to analyze
\n",
     "d) Select appropriate options and save to file by clicking on 'Save' button
\n",
     "Note: removeArtifacts should be set to 'False' initially
\n",
@@ -41,20 +17,27 @@
-    "f) Do not close GUI browser window for input parameters file saving. To make changes to parameters, simply hit \"Save\" and continue to Step 3. "
+    "f) Do not close GUI browser window for input parameters file saving. To make changes to parameters, simply hit \"Save\" and continue to Step 3. "
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For steps 1-3, please use the conda environment defined by guppy_read_env.yaml, especially if using NWB files."
+   ]
+  },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [],
    "source": [
-    "inputParameters = \"/Users/VENUS/Downloads/FP_Data/habitEarly/inputParameters/inputParameters.json\" "
+    "inputParametersPath = \"/Users/pauladkisson/GuPPyParamtersUsed.json\""
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Step 3: Storenames GUI \n",
+    "### Step 2: Storenames GUI \n",
     "\n",
     "a) Click Storenames GUI icon
\n",
     "b) Select desired storenames to be analyzed
\n",
@@ -69,7 +52,48 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Step 4: Read Raw Data"
+    "### Step 3: Read Raw Data"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "### Reading raw data... ###\n",
+      "/Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04\n",
+      "Trying to read tsq file.\n",
+      "\u001b[1mtsq file not found.\u001b[1m\n",
+      "Checking if doric file exists.\n",
+      "\u001b[1mDoric file not found.\u001b[1m\n",
+      "Reading all events [0, 1] from NWB file /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04.nwb to save to /Volumes/T7/CatalystNeuro/NWB/Lerner/guppy_example_data/sub-112.283_ses-FP_PS_2019-06-20T09-32-04/sub-112.283_ses-FP_PS_2019-06-20T09-32-04_output_0\n",
+      "Data for event event_0 fetched and stored.\n",
+      "Data for event event_1 fetched and stored.\n",
+      "### Raw data fetched and saved.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from readTevTsq import readRawData\n",
+    "readRawData(inputParametersPath)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Step 4: Import Python Packages"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For steps 4-8, please use the main guppy environment installed from the spec_file appropriate for your OS."
+   ]
+  },
   {
@@ -78,7 +102,17 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "readRawData(inputParameters)"
+    "%load_ext autoreload\n",
+    "%autoreload 2\n",
+    "\n",
+    "%matplotlib tk\n",
+    "import os\n",
+    "import json\n",
+    "from preprocess import extractTsAndSignal\n",
+    "from computePsth import psthForEachStorename\n",
+    "from findTransientsFreqAndAmp import executeFindFreqAndAmp\n",
+    "\n",
+    "inputParametersPath = \"/Users/pauladkisson/GuPPyParamtersUsed.json\""
    ]
   },
@@ -91,11 +125,32 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Extracting signal data and event timestamps...\n",
+      "Remove Artifacts : False\n",
+      "Combine Data : False\n",
+      "Isosbestic Control Channel : True\n",
+      "Correcting timestamps by getting rid of the first 1 seconds and convert timestamps to seconds...\n",
+      "Timestamps corrected and converted to seconds.\n",
+      "Applying correction of timestamps to the data and event timestamps...\n",
+      "Timestamps corrections applied to the data and event timestamps.\n",
+      "Applying correction of timestamps to the data and event timestamps...\n",
+      "Timestamps corrections applied to the data and event timestamps.\n",
+      "Computing z-score for each of the data...\n",
+      "Remove Artifacts : False\n",
+      "z-score for the data computed.\n",
+      "Signal data and event timestamps are extracted.\n"
+     ]
+    }
+   ],
    "source": [
-    "extractTsAndSignal(inputParameters)"
+    "extractTsAndSignal(inputParametersPath)"
    ]
   },
@@ -120,12 +175,28 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 5,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Computing PSTH, Peak and Area for each event...\n",
+      "Average for group : False\n",
+      "PSTH, Area and Peak are computed for all events.\n",
+      "Finding transients in z-score data and calculating frequency and amplitude....\n",
+      "Calculating frequency and amplitude of transients in z-score data....\n",
+      "Creating chunks for multiprocessing...\n",
+      "Chunks are created for multiprocessing.\n",
+      "Frequency and amplitude of transients in z_score data are calculated.\n",
+      "Transients in z-score data found and frequency and amplitude are calculated.\n"
+     ]
+    }
+   ],
    "source": [
-    "psthForEachStorename(inputParameters)\n",
-    "executeFindFreqAndAmp(inputParameters)"
+    "ip = psthForEachStorename(inputParametersPath)\n",
+    "executeFindFreqAndAmp(ip)"
    ]
   },
@@ -152,21 +223,43 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [],
    "source": [
-    "inputParameters_group = \"/Users/VENUS/Downloads/FP_Data/T1FAM/inputParameters/inputParameters.json\" "
+    "inputParametersPath_group = \"/Users/vns0170/GuPPyParamtersUsed.json\" "
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 7,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Computing PSTH, Peak and Area for each event...\n",
+      "Average for group : True\n",
+      "Averaging group of data...\n",
+      "Group of data averaged.\n",
+      "Averaging group of data...\n",
+      "Group of data averaged.\n",
+      "Averaging group of data...\n",
+      "Group of data averaged.\n",
+      "Averaging group of data...\n",
+      "Group of data averaged.\n",
+      "PSTH, Area and Peak are computed for all events.\n",
+      "Finding transients in z-score data and calculating frequency and amplitude....\n",
+      "Combining results for frequency and amplitude of transients in z-score data...\n",
+      "Results for frequency and amplitude of transients in z-score data are combined.\n",
+      "Transients in z-score data found and frequency and amplitude are calculated.\n"
+     ]
+    }
+   ],
    "source": [
-    "psthForEachStorename(inputParameters_group)\n",
-    "executeFindFreqAndAmp(inputParameters_group)"
+    "ip = psthForEachStorename(inputParametersPath_group)\n",
+    "executeFindFreqAndAmp(ip)"
    ]
   },
@@ -191,70 +284,28 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
-   "source": [
-    "conda init zsh"
-   ]
+   "source": []
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "/Users/VENUS/all_codes/GuPPy\n"
-     ]
-    }
-   ],
-   "source": [
-    "cd ~/all_codes/GuPPy"
-   ]
+   "outputs": [],
+   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
-   "source": [
-    "!sh GuPPy_create_environment.sh"
-   ]
+   "source": []
  },
  {
   "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\r\n",
-      "CommandNotFoundError: Your shell has not been properly configured to use 'conda activate'.\r\n",
-      "To initialize your shell, run\r\n",
-      "\r\n",
-      "    $ conda init \r\n",
-      "\r\n",
-      "Currently supported shells are:\r\n",
-      "  - bash\r\n",
-      "  - fish\r\n",
-      "  - tcsh\r\n",
-      "  - xonsh\r\n",
-      "  - zsh\r\n",
-      "  - powershell\r\n",
-      "\r\n",
-      "See 'conda init --help' for more information and options.\r\n",
-      "\r\n",
-      "IMPORTANT: You may need to close and restart your shell after running 'conda init'.\r\n",
-      "\r\n",
-      "\r\n"
-     ]
-    }
-   ],
-   "source": [
-    "!conda activate guppy_test"
-   ]
+   "outputs": [],
+   "source": []
  },
  {
   "cell_type": "code",
@@ -506,9 +557,9 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "guppy",
+   "display_name": "guppy_env",
    "language": "python",
-   "name": "guppy"
+   "name": "python3"
   },
   "language_info": {
    "codemirror_mode": {
@@ -520,7 +571,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.12"
+   "version": "3.12.4"
   }
  },
 "nbformat": 4,
diff --git a/GuPPy/savingInputParameters.ipynb b/GuPPy/savingInputParameters.ipynb
index a888ff1..843afbc 100755
--- a/GuPPy/savingInputParameters.ipynb
+++ b/GuPPy/savingInputParameters.ipynb
@@ -427,11 +427,12 @@
 "        \"highAmpFilt\": highAmpFilt.value,\n",
 "        \"transientsThresh\": transientsThresh.value \n",
 "    }\n",
-"    for folder in files_1.value:\n",
-"        with open(os.path.join(folder, 'GuPPyParamtersUsed.json'), 'w') as f:\n",
-"            json.dump(analysisParameters, f, indent=4)\n",
-"        insertLog(f\"Input Parameters file saved at {folder}\",\n",
-"                  logging.INFO)\n",
+"    parameters = getInputParameters()\n",
+"\n",
+"    with open(os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json'), 'w') as f:\n",
+"        json.dump(parameters, f, indent=4)\n",
+"    insertLog(f\"Input Parameters file saved at {os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json')}\",\n",
+"              logging.INFO)\n",
 "    \n",
 "    insertLog('#'*400, logging.INFO)\n",
@@ -468,9 +469,9 @@
 "    \n",
 "mark_down_ip = pn.pane.Markdown(\"\"\"**Step 1 : Save Input Parameters**\"\"\", width=500)\n",
 "mark_down_ip_note = pn.pane.Markdown(\"\"\"***Note : ***
\n",
-"    - Save Input Parameters will save input parameters used for the analysis\n",
-"      in all the folders you selected for the analysis (useful for future\n",
-"      reference). All analysis steps will run without saving input parameters.\n",
+"    - Saving the input parameters is mandatory. The file is saved in your\n",
+"      HOME directory; copy its path into the analysis notebook\n",
+"      (runFiberPhotometryAnalysis.ipynb).\n",
 "    \"\"\", width=500, sizing_mode=\"stretch_width\")\n",
 "save_button = pn.widgets.Button(name='Save to file...', button_type='primary', width=500, sizing_mode=\"stretch_width\", align='end')\n",
 "mark_down_storenames = pn.pane.Markdown(\"\"\"**Step 2 : Open Storenames GUI
and save storenames**\"\"\", width=500)\n",
diff --git a/guppy_read_env.yaml b/guppy_read_env.yaml
new file mode 100644
index 0000000..11e09f2
--- /dev/null
+++ b/guppy_read_env.yaml
@@ -0,0 +1,14 @@
+name: guppy_read_env
+channels:
+  - defaults
+  - conda-forge
+dependencies:
+  - python>=3.8
+  - h5py
+  - numpy
+  - pandas
+  - tqdm
+  - ipykernel
+  - pip
+  - pip:
+    - pynwb
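Reviewer note: the hunks above change every pipeline entry point (`readRawData`, `extractTsAndSignal`, `psthForEachStorename`) from taking a parsed `inputParameters` dict to taking the path of the parameters JSON saved by the GUI, while `executeFindFreqAndAmp` still takes the dict that `psthForEachStorename` now returns. Below is a minimal sketch of the resulting calling convention; it assumes the GUI has already written `~/GuPPyParamtersUsed.json` and that storenames were saved, and it is run from the `GuPPy/` source directory so the module imports resolve. It is not part of the patch.

```python
# Sketch only: mirrors the notebook's calling sequence after this PR.
import os

inputParametersPath = os.path.join(os.path.expanduser('~'), 'GuPPyParamtersUsed.json')

# --- Step 3: read raw data (TDT/csv/doric/NWB) ---
# Run under the guppy_read_env kernel, where pynwb is importable.
from readTevTsq import readRawData
readRawData(inputParametersPath)                 # takes the JSON *path*, not the dict

# --- Steps 5-8: preprocessing, PSTH, transients ---
# Run under the main GuPPy environment kernel.
from preprocess import extractTsAndSignal
from computePsth import psthForEachStorename
from findTransientsFreqAndAmp import executeFindFreqAndAmp

extractTsAndSignal(inputParametersPath)          # also takes the path
ip = psthForEachStorename(inputParametersPath)   # returns the loaded parameters dict
executeFindFreqAndAmp(ip)                        # still expects the dict, not the path
```

Because the two halves are meant for different conda environments, they cannot actually run in a single interpreter as written; the notebook runs them in separate kernels, which is the design choice this split-environment sketch reflects.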