diff --git a/connPFM/connectivity/connectivity_utils.py b/connPFM/connectivity/connectivity_utils.py index 61ad89a..329fd6d 100644 --- a/connPFM/connectivity/connectivity_utils.py +++ b/connPFM/connectivity/connectivity_utils.py @@ -8,7 +8,25 @@ def calculate_ets(y, n): - """Calculate edge-time series.""" + """ + Calculate edge-time series. + + Parameters + ---------- + y : numpy matrix + matrix with time-series for each ROI + n : int + number of nodes + Returns + ------- + u : ndarray + vector of indices for the upper triangle of the matrix y axis + v : ndarray + vector of indices for the upper triangle of the matrix x axis + + ets : matrix + edge time-series + """ # upper triangle indices (node pairs = edges) u, v = np.argwhere(np.triu(np.ones(n), 1)).T @@ -19,7 +37,40 @@ def calculate_ets(y, n): def rss_surr(z_ts, u, v, surrprefix, sursufix, masker, irand, nbins, hist_range=(0, 1)): - """Calculate RSS on surrogate data.""" + """ + Calculate RSS on surrogate data. + + Parameters + ---------- + z_ts : numpy matrix + z-scored time-series matrix for each ROI + u : ndarray + vector of indices for the upper triangle of the matrix y axis + v : ndarray + vector of indices for the upper triangle of the matrix x axis + surrprefix : string + prefix of the surrogate file + sursufix : string + suffix of the surrogate file + masker : instance of NiftiMasker + masker object to load the surrogate data + irand : int + index of the surrogate + nbins : int + number of bins for the histogram + hist_range : tuple + range of the histogram + Returns + ------- + rssr : numpy matrix + RSSr matrix + etsr : numpy matrix + edge-time series matrix for surrogate + ets_hist : numpy matrix + edge-time series histogram for surrogate + bin_edges : numpy matrix + histogram bins for the edge-time series matrix + """ [t, n] = z_ts.shape if surrprefix != "": @@ -83,6 +134,19 @@ def threshold_ets_matrix(ets_matrix, thr, selected_idxs=None): """ Threshold the edge time-series matrix based on the 
selected time-points and the surrogate matrices. + + Parameters + ---------- + ets_matrix : numpy matrix + edge time-series matrix + thr : float + threshold value + selected_idxs : numpy array + indices of the selected time-points + Returns + ------- + thresholded_matrix : numpy matrix + thresholded edge time-series matrix """ # Initialize matrix with zeros thresholded_matrix = np.zeros(ets_matrix.shape) @@ -106,7 +170,23 @@ def threshold_ets_matrix(ets_matrix, thr, selected_idxs=None): def calculate_surrogate_ets(surrprefix, sursufix, irand, masker): - """Read surrogate data.""" + """ + Read surrogate data. + + Parameters + ---------- + surrprefix : string + prefix of the surrogate file + sursufix : string + suffix of the surrogate file + irand : int + index of the surrogate + masker : instance of NiftiMasker + Returns + ------- + ets : numpy matrix + edge time-series matrix + """ auc = masker.fit_transform(f"{surrprefix}{irand}{sursufix}.nii.gz") [t, n] = auc.shape ets, _, _ = calculate_ets(np.nan_to_num(auc), n) @@ -122,7 +202,28 @@ def calculate_hist( hist_range, nbins=500, ): - """Calculate histogram.""" + """ + Calculate histogram. + Parameters + ---------- + surrprefix : string + prefix of the surrogate file + sursufix : string + suffix of the surrogate file + irand : int + index of the surrogate + masker : instance of NiftiMasker + hist_range : tuple + range of the histogram + nbins : int + number of bins for the histogram + Returns + ------- + ets_hist : numpy matrix + edge-time series histogram for surrogate + bin_edges : numpy matrix + histogram bins for the edge-time series matrix + """ ets_temp = calculate_surrogate_ets(surrprefix, sursufix, irand, masker) ets_hist, bin_edges = np.histogram(ets_temp.flatten(), bins=nbins, range=hist_range) @@ -131,7 +232,21 @@ def calculate_hist( def calculate_hist_threshold(hist, bins, percentile=95): - """Calculate histogram threshold.""" + """ + Calculate histogram threshold. 
+ Parameters + ---------- + hist : numpy matrix + edge-time series histogram for surrogate + bins : numpy matrix + histogram bins for the edge-time series matrix + percentile : float + percentile for the histogram threshold + Returns + ------- + thr : float + threshold value + """ cumsum_percentile = np.cumsum(hist) / np.sum(hist) * 100 thr = bins[len(cumsum_percentile[cumsum_percentile <= percentile])] @@ -144,6 +259,14 @@ def sum_histograms( """ Get histograms of all surrogates and sum them to obtain a single histogram that summarizes the data. + Parameters + ---------- + hist_list : list + list of histograms + Returns + ------- + hist_sum : numpy matrix + histogram of all surrogates """ # Initialize matrix to store surrogate histograms diff --git a/connPFM/connectivity/ev.py b/connPFM/connectivity/ev.py index 763668f..dfe7063 100755 --- a/connPFM/connectivity/ev.py +++ b/connPFM/connectivity/ev.py @@ -24,7 +24,47 @@ def event_detection( nbins=1000, te=[0], ): - """Perform event detection on given data.""" + """ + Perform event detection on given data. + + Parameters + ---------- + data_file : str or list of str + Path to data file. + atlas : str + Path to atlas file. + surrprefix : str + Prefix for surrogate data files. + sursufix : str + Suffix for surrogate data files. + nsur : int + Number of surrogates. + segments : bool + If True, perform event detection on each segment separately. + peak_detection : str + Method to use for peak detection. + nbins : int + Number of bins to use for histogram. + te : list of int + List of TEs to perform event detection on. + Returns + ------- + ets : ndarray + Edge-time-series matrix. + rss : ndarray + Root-sum-square time-series. + rssr: ndarray + Root-sum-square time-series for surrogates + idxpeak : ndarray + Indices of peaks in RSS. + ets_denoised : ndarray + Denoised edge-time-series matrix. 
+ mu : array + mean co-fluctuation (edge time series) across all peaks + u : ndarray + vector of indices for the upper triangle of the matrix y axis + v : ndarray + vector of indices for the upper triangle of the matrix x axis""" data, masker = load_data(data_file, atlas, n_echos=len(te)) # load and zscore time series @@ -163,6 +203,40 @@ def ev_workflow( ): """ Main function to perform event detection and plot results. + + Parameters + ---------- + data_file : str + path to the data file + auc_file : str + path to the auc file + atlas : str + path to the atlas file + surr_dir : str + path to the directory containing the surrogate data + out_dir : str + path to the output directory + matrix : str + path to the output for the event detection matrix + te : list + list of TEs + nsurrogates : int + number of surrogates + dvars : str + path to the dvars file + enorm : str + path to the enorm file + afni_text : str + path to the afni text file + history_str : str + string to be added to the history of the output files + peak_detection : str + method to use for peak detection + + Returns + ------- + ets_auc_denoised : ndarray + edge time series matrix denoised """ #  If te is None, make it a list with 0 if te is None and len(data_file) == 1: te = [0] elif len(te) > 1: diff --git a/connPFM/connectivity/plotting.py b/connPFM/connectivity/plotting.py index ccc3ee4..a536240 100644 --- a/connPFM/connectivity/plotting.py +++ b/connPFM/connectivity/plotting.py @@ -34,6 +34,27 @@ def plot_ets_matrix( ): """ Plots edge-time matrix + + Parameters + ---------- + ets : ndarray + edge time series matrix + outdir : str + output directory + rss : ndarray + root-sum-square timeseries + sufix : str + suffix to be added to the output file + dvars_file : str + path to the dvars file + enorm_file : str + path to the enorm file + peaks : list + list of peaks + vmin : float + minimum value for the colorbar + vmax : float + maximum value for the colorbar """ if vmin is None: vmin = np.min(ets) diff --git 
a/connPFM/debiasing/debiasing.py b/connPFM/debiasing/debiasing.py index 1430389..47361c2 100644 --- a/connPFM/debiasing/debiasing.py +++ b/connPFM/debiasing/debiasing.py @@ -11,7 +11,31 @@ def debiasing(data_file, mask, te, mtx, tr, prefix, groups, groups_dist, history_str): - """Perform debiasing based on denoised edge-time matrix.""" + """ + Perform debiasing based on denoised edge-time matrix. + + Parameters + ---------- + data_file : str or list of str + Path to data files + mask : str + Path to mask file + te : list of int + list of TEs to perform the debiasing + mtx : ndarray + matrix to do the debiasing + tr : float + repetition time + prefix : str + prefix for output files + groups : bool + If True, perform debiasing with groups hrf + groups_dist : int + Distance between groups + history_str : str + History string + """ + if te is None and len(data_file) == 1: te = [0] elif len(te) > 1: diff --git a/connPFM/deconvolution/roiPFM.py b/connPFM/deconvolution/roiPFM.py index a4ee947..e7d4402 100644 --- a/connPFM/deconvolution/roiPFM.py +++ b/connPFM/deconvolution/roiPFM.py @@ -28,7 +28,45 @@ def roiPFM( hrf_path=None, history_str="", ): + """ + Compute PFM for the timeseries of the dataset ROIs + Parameters + ---------- + data : list of str + list of datasets containing the different echos + atlas : str + dataset with the different ROIs to extract the timeseries + output: str + path for output file + tr: integer + repetition time of the dataset + username: str + name of the user in case of launching jobs in a cluster + te: list of integer + echo times for the different echoes if data is multiecho + dir: str + temporary directory name + block: boolean + if True, use the block design for the hrf + jobs: integer + number of jobs to launch in a cluster, + if 0 then use serial execution (recommended only for testing) + nsurrogates: integer + number of surrogate datasets to generate + nstability: integer + number of stability iterations + percentile: float + percentile to use 
for the stability threshold + maxiterfactor: float + factor to use for the maximum number of iterations + hrf_shape: str + shape of the HRF to use + hrf_path: str + path to the user hrf file to use + history_str: str + history string to add to the output file + """ if te is None and len(data) == 1: te = [0] elif len(te) > 1: diff --git a/connPFM/utils/io.py b/connPFM/utils/io.py index c2de254..cdca3b7 100644 --- a/connPFM/utils/io.py +++ b/connPFM/utils/io.py @@ -12,6 +12,23 @@ def load_data(data, atlas, n_echos=1): """ Load and mask data with atlas using NiftiLabelsMasker. + + Parameters + ---------- + data : list of str + list of datasets containing the different echos + atlas : str + dataset with the different ROIs to extract the timeseries + n_echos : integer + Number of echos + + Returns + ------- + data_masked : Numpy matrix + nROI x nscans Timeseries of the selected ROIs extracted from the dataset, + in case of multiecho echoes are concatenated as nROI x (nscans x echos) + masker : instance of NiftiMasker + masker object to load the data + """ # Initialize masker object masker = NiftiLabelsMasker(labels_img=atlas, standardize=False, strategy="mean") @@ -42,6 +59,23 @@ def load_data(data, atlas, n_echos=1): def save_img(data, output, masker, history_str=None): """ Save data as Nifti image, and update header history. + + Parameters + ---------- + data : numpy matrix + nROI x nscans Timeseries of the selected ROIs extracted from the dataset + output: str + path for output file + masker : instance of NiftiMasker + masker object to transform the data to a 3D matrix + + Returns + ------- + data_masked : Numpy matrix + nROI x nscans Timeseries of the selected ROIs extracted from the dataset, + in case of multiecho echoes are concatenated as nROI x (nscans x echos) + masker : instance of NiftiMasker + masker object to load the data """ # Transform data back to Nifti image data_img = masker.inverse_transform(data)