{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "68623630",
   "metadata": {},
   "source": [
    "## Import Packages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6db73c3e",
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib inline\n",
    "from typing import Callable, Iterable\n",
    "import nilearn\n",
    "from nilearn import plotting\n",
    "import nilearn.datasets as datasets\n",
    "import nilearn.image as image\n",
    "from nilearn import surface\n",
    "from sklearn.utils import shuffle\n",
    "import nibabel as nib\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import seaborn as sns\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib.colors import LinearSegmentedColormap\n",
    "from matplotlib.colors import ListedColormap\n",
    "\n",
    "from brainspace.gradient import GradientMaps\n",
    "from brainspace.utils.parcellation import map_to_labels\n",
    "\n",
    "\n",
    "from neurolang.frontend import NeurolangPDL, ExplicitVBR\n",
    "import data_utils\n",
    "from data_utils import vox2mm, merge_difumo_network_label, truncate_colormap, create_coordinate_dataset"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "68e02faf",
   "metadata": {},
   "source": [
    "## Call NeuroLang's Probablistic Frontend"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4ce72b9a",
   "metadata": {},
   "outputs": [],
   "source": [
    "nl = NeurolangPDL()"
   ]
  },
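  {
   "cell_type": "markdown",
   "id": "b7d1a101",
   "metadata": {},
   "source": [
    "As a quick illustration of the pattern used throughout this notebook (a sketch with hypothetical data, not part of the analysis): add a small table with `add_tuple_set`, then query it with `execute_datalog_program`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a102",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal sketch (hypothetical data): add a tiny table and query it\n",
    "demo = pd.DataFrame({\"x\": [1, 2, 3], \"y\": [10, 20, 30]})\n",
    "nl.add_tuple_set(demo, name=\"Demo\")\n",
    "\n",
    "with nl.scope as e:\n",
    "    res = nl.execute_datalog_program(\"\"\"\n",
    "    ans(x, y) :- Demo(x, y) & (x > 1)\n",
    "    \"\"\")\n",
    "\n",
    "res.as_pandas_dataframe()"
   ]
  },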
  {
   "cell_type": "markdown",
   "id": "9c47ab78",
   "metadata": {},
   "source": [
    "## Load Grey Matter Mask and Add it as Tuple List (i.e. Table) in NeuroLang"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e23e00cb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Grey Matter Mask\n",
    "mni_mask = image.resample_img(\n",
    "    \n",
    "    nib.load(datasets.fetch_icbm152_2009()[\"gm\"]), np.eye(3)*3, interpolation=\"nearest\"\n",
    ")\n",
    "\n",
    "plotting.plot_stat_map(mni_mask, title='Grey Matter Mask')\n",
    "\n",
    "# Extract GM voxel coordinates (probability > 0.25) and store them in table \"GreyMatterVoxel\"\n",
    "gm_matrix_coordinates = np.array(np.where(mni_mask.get_fdata() > 0.25)).T\n",
    "\n",
    "gm_xyz_coordinates = pd.DataFrame(vox2mm(gm_matrix_coordinates, affine=mni_mask.affine), \n",
    "                                  columns=[\"x\", \"y\", \"z\"])\n",
    "\n",
    "nl.add_tuple_set(gm_xyz_coordinates, name=\"GreyMatterVoxel\")"
   ]
  },
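  {
   "cell_type": "markdown",
   "id": "b7d1a103",
   "metadata": {},
   "source": [
    "The helper `vox2mm` comes from the accompanying `data_utils` module. A minimal sketch of what such a helper presumably does, assuming it simply applies the image affine to integer voxel indices (the inverse transform, mm back to voxel, is used near the end of this notebook):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a104",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch (assumption): vox2mm applies the image affine to (i, j, k) voxel\n",
    "# indices, yielding world-space (x, y, z) coordinates in mm\n",
    "def vox2mm_sketch(ijk, affine):\n",
    "    return nib.affines.apply_affine(affine, ijk)\n",
    "\n",
    "# Should print True if the assumption matches data_utils.vox2mm\n",
    "print(np.allclose(vox2mm_sketch(gm_matrix_coordinates, mni_mask.affine),\n",
    "                  vox2mm(gm_matrix_coordinates, affine=mni_mask.affine)))"
   ]
  },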
  {
   "cell_type": "markdown",
   "id": "289d9a7a",
   "metadata": {},
   "source": [
    "## Load LPFC mask and Add it as a Table in NeuroLang"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4fb06bee",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Lateral Prefrontal Cortex Mask\n",
    "lpfc_mask=image.resample_to_img(source_img=nib.load('lpfc_mask.nii'), target_img=mni_mask,\n",
    "                                        interpolation='nearest')\n",
    "\n",
    "plotting.plot_roi(lpfc_mask, cut_coords=(45,30,30), title='Lateral Prefrontal Cortex Mask')\n",
    "\n",
    "\n",
    "\n",
    "# Extract LPFC voxel coordinates and store them in table \"LateralPFCVoxel\"\n",
    "lpfc_matrix_coordinates=np.array(np.where(lpfc_mask.get_fdata() != 0)).T \n",
    "\n",
    "lpfc_xyz_coordinates = pd.DataFrame(vox2mm(lpfc_matrix_coordinates, affine=mni_mask.affine),\n",
    "                                   columns = [\"x\", \"y\", \"z\"])\n",
    "\n",
    "nl.add_tuple_set(lpfc_xyz_coordinates, name=\"LPFCVoxel\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f2603bb3",
   "metadata": {},
   "source": [
    "## Load Neurosynth Data and Add them to NeuroLang"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7f1c132b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Neurosynth Data\n",
    "term_in_study, peak_reported, study_ids = data_utils.fetch_neurosynth(\n",
    "    tfidf_threshold=0.001,\n",
    ")\n",
    "\n",
    "# Store Peak Activation Coordinates in a \"PeakReported\" table\n",
    "nl.add_tuple_set(peak_reported, name=\"PeakReported\")\n",
    "# Store Study IDs in a \"Study\" table\n",
    "nl.add_tuple_set(study_ids.drop_duplicates(), name=\"Study\")\n",
    "# Add uniform weights for study selection in the meta-analysis and store them in \"SelectedStudy\" table\n",
    "nl.add_uniform_probabilistic_choice_over_set(study_ids.drop_duplicates(), name=\"SelectedStudy\")"
   ]
  },
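  {
   "cell_type": "markdown",
   "id": "b7d1a105",
   "metadata": {},
   "source": [
    "`fetch_neurosynth` is another `data_utils` helper; `tfidf_threshold` presumably controls which (study, term) associations are kept. A hedged sketch of that filtering step, with assumed column names `study_id`, `term` and `tfidf`:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a106",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch (assumption): keep only (study, term) pairs whose TF-IDF weight\n",
    "# exceeds the threshold; the column names are illustrative\n",
    "def threshold_term_in_study(features: pd.DataFrame, tfidf_threshold: float = 0.001) -> pd.DataFrame:\n",
    "    keep = features[\"tfidf\"] > tfidf_threshold\n",
    "    return features.loc[keep, [\"study_id\", \"term\"]]"
   ]
  },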
  {
   "cell_type": "markdown",
   "id": "e9d2e219",
   "metadata": {},
   "source": [
    "## Load Difumo-1024 Atlas Data and Add them to NeuroLang"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "df451851",
   "metadata": {},
   "outputs": [],
   "source": [
    "#Difumo 1024 Regions Atlas\n",
    "region_voxels, difumo_meta = data_utils.fetch_difumo(\n",
    "    mask=mni_mask,\n",
    "    coord_type=\"xyz\",\n",
    "    n_components=1024,\n",
    ")\n",
    "\n",
    "\n",
    "nl.add_tuple_set(region_voxels.drop_duplicates(), name=\"RegionVoxel\")"
   ]
  },
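  {
   "cell_type": "markdown",
   "id": "b7d1a107",
   "metadata": {},
   "source": [
    "`RegionVoxel(r, x, y, z, w)` stores, for every DiFuMo component `r`, its voxel coordinates and soft weight `w`. To eyeball one component, its voxels can be painted back into a volume (a sketch reusing the mm-to-voxel affine trick from the gradient cell below; the `region`, `x`, `y`, `z` columns match `region_voxels`):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a108",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: render one DiFuMo component's voxels as a binary ROI volume\n",
    "r0 = region_voxels[\"region\"].iloc[0]  # an arbitrary component id\n",
    "one = region_voxels[region_voxels[\"region\"] == r0]\n",
    "i, j, k = np.round(\n",
    "    nib.affines.apply_affine(np.linalg.inv(mni_mask.affine), one[[\"x\", \"y\", \"z\"]].values)\n",
    ").T.astype(int)\n",
    "vol = np.zeros(mni_mask.shape)\n",
    "vol[i, j, k] = 1\n",
    "plotting.plot_roi(nib.Nifti1Image(vol, affine=mni_mask.affine), title=f\"DiFuMo component {r0}\")"
   ]
  },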
  {
   "cell_type": "markdown",
   "id": "63acc459",
   "metadata": {},
   "source": [
    "## Define Functions to be used in NeuroLang Programs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "dff44ee0",
   "metadata": {},
   "outputs": [],
   "source": [
    "EPS = 1e-20\n",
    "@nl.add_symbol\n",
    "def log_odds(p, p0):\n",
    "    a = min(p/(1-p) + EPS, 1.)\n",
    "    b = min(p0/(1-p0) + EPS, 1.)\n",
    "    logodds = np.log10(a/b)\n",
    "    return logodds\n",
    "\n",
    "@nl.add_symbol\n",
    "def weighted_center(coords: Iterable[int], w: Iterable[float]) -> int:\n",
    "    return int(np.sum(np.multiply(w, coords)) / np.sum(w))\n",
    "\n",
    "\n",
    "@nl.add_symbol\n",
    "def agg_int_to_str(a,b):\n",
    "    d = min(a, b)\n",
    "    e = max(a,b)\n",
    "    return str(\"%d-%d\"%(d, e))\n",
    "\n",
    "@nl.add_symbol\n",
    "def Euclidean_Dist(a, b, c, d, e, f):\n",
    "    x = np.array((a, b, c))\n",
    "    y = np.array((d, e, f))\n",
    "    temp = x-y\n",
    "    dist = np.sqrt(np.dot(temp.T, temp))\n",
    "    return dist\n",
    "\n",
    "@nl.add_symbol\n",
    "def maximum(w: Iterable[float]) -> float:\n",
    "    return np.max(w)"
   ]
  },
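  {
   "cell_type": "markdown",
   "id": "b7d1a109",
   "metadata": {},
   "source": [
    "Before using the symbols in Datalog, their formulas can be sanity-checked with plain NumPy (the values below are illustrative only):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a10a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick sanity checks of the formulas behind the symbols (plain NumPy)\n",
    "p, p0 = 0.3, 0.1\n",
    "a = min(p / (1 - p) + EPS, 1.0)\n",
    "b = min(p0 / (1 - p0) + EPS, 1.0)\n",
    "print(\"log-odds ratio:\", np.log10(a / b))  # positive: p is larger than p0\n",
    "\n",
    "u, v = np.array((0, 0, 0)), np.array((3, 4, 0))\n",
    "print(\"distance:\", np.sqrt(np.dot(u - v, u - v)))  # 3-4-5 triangle: 5.0\n",
    "\n",
    "print(\"interval label:\", \"%d-%d\" % (min(7, 2), max(7, 2)))  # \"2-7\""
   ]
  },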
  {
   "cell_type": "markdown",
   "id": "6fbf57b0",
   "metadata": {},
   "source": [
    "## Infer the VoxelReported table "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1fed2f60",
   "metadata": {},
   "outputs": [],
   "source": [
    "with nl.scope as e:\n",
    "    voxel_reported=nl.execute_datalog_program(\"\"\"\n",
    "    \n",
    "    VoxelReported(x, y, z, study) :- GreyMatterVoxel(x, y, z) & PeakReported(x2, y2, z2, study) & (distance == EUCLIDEAN(x, y, z, x2, y2, z2)) & (distance < 10)\n",
    "    \n",
    "    ans(x, y, z, study) :- VoxelReported(x, y, z, study) \n",
    "    \"\"\")\n",
    "\n",
    "nl.add_tuple_set(voxel_reported.as_pandas_dataframe().drop_duplicates(), name=\"VoxelReported\")"
   ]
  },
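  {
   "cell_type": "markdown",
   "id": "b7d1a10b",
   "metadata": {},
   "source": [
    "The 10 mm association rule can be cross-checked outside Datalog. A sketch with `scipy.spatial.cKDTree` (an assumption: SciPy is not otherwise used in this notebook), pairing each grey-matter voxel with the peaks reported within 10 mm of it:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a10c",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: the same 10 mm voxel-peak association, computed with a KD-tree\n",
    "from scipy.spatial import cKDTree\n",
    "\n",
    "peaks = peak_reported.iloc[:, :3].values  # assumes the first three columns are x, y, z\n",
    "tree = cKDTree(peaks)\n",
    "neighbours = tree.query_ball_point(gm_xyz_coordinates[[\"x\", \"y\", \"z\"]].values, r=10)\n",
    "print(\"GM voxels with at least one peak within 10 mm:\",\n",
    "      sum(len(n) > 0 for n in neighbours))"
   ]
  },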
  {
   "cell_type": "markdown",
   "id": "12f320e0",
   "metadata": {},
   "source": [
    "## Finding Regions within the LPFC mask"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "71848b64",
   "metadata": {},
   "outputs": [],
   "source": [
    "with nl.scope as e:\n",
    "    lpfc_regions = nl.execute_datalog_program(\"\"\"\n",
    "    \n",
    "    RegionVolume(r, count(x, y, z)) :- RegionVoxel(r, x, y, z, w)\n",
    "    \n",
    "    VolumeOfOverlapWithMask(r, count(x, y, z)) :- RegionVoxel(r, x, y, z, w) & LPFCVoxel(x, y, z)\n",
    "    \n",
    "    LPFCRegion(r) :- RegionVolume(r, v0) & VolumeOfOverlapWithMask(r, v) & (v/v0 > 0.5)\n",
    "    \n",
    "    ans(r) :- LPFCRegion(r) \n",
    "    \"\"\")\n",
    "nl.add_tuple_set(lpfc_regions.as_pandas_dataframe().drop_duplicates(), name=\"LPFCRegion\")    "
   ]
  },
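  {
   "cell_type": "markdown",
   "id": "b7d1a10d",
   "metadata": {},
   "source": [
    "The same >50% overlap criterion can be cross-checked in pandas, reusing `region_voxels` and `lpfc_xyz_coordinates` (a sketch; it relies on both tables sharing the exact same voxel grid, which holds here since both were resampled to `mni_mask`):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a10e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: fraction of each component's voxels falling inside the LPFC mask\n",
    "volume = region_voxels.groupby(\"region\").size()\n",
    "overlap = region_voxels.merge(lpfc_xyz_coordinates, on=[\"x\", \"y\", \"z\"]).groupby(\"region\").size()\n",
    "fraction = (overlap / volume).fillna(0)\n",
    "lpfc_region_ids = fraction[fraction > 0.5].index  # should match LPFCRegion\n",
    "print(len(lpfc_region_ids), \"components lie mostly within the LPFC mask\")"
   ]
  },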
  {
   "cell_type": "markdown",
   "id": "93fe83d8",
   "metadata": {},
   "source": [
    "## Inferring MetaAnalytic Connectivity Matrix"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "aaa3ef19",
   "metadata": {},
   "outputs": [],
   "source": [
    "with nl.scope as e:\n",
    "    connectivity_matrix = nl.execute_datalog_program(\"\"\"\n",
    "    \n",
    "    RegionMaxWeight(r, max(w)) :- RegionVoxel(r, x, y, z, w) \n",
    "    \n",
    "    RegionVoxelNormalizedWeight(r, x, y, z) :: w/W :- RegionVoxel(r, x, y, z, w) & RegionMaxWeight(r, W)\n",
    "    \n",
    "    LPFCRegionActive(r, study) :- RegionVoxelNormalizedWeight(r, x, y, z) & VoxelReported(x, y, z, study) & LPFCRegion(r)\n",
    "    \n",
    "    LPFCRegionNotActive(r, study) :- LPFCRegion(r) & ~LPFCRegionActive(r, study) & Study(study) \n",
    "    \n",
    "    BrainRegionActive(r, study) :- VoxelReported(x, y, z, study) & RegionVoxelNormalizedWeight(r, x, y, z)\n",
    "    \n",
    "    ProbabilityOfCoactivation(r, r2, PROB) :- BrainRegionActive(r, study) // (LPFCRegionActive(r2, study) & SelectedStudy(study))\n",
    "    \n",
    "    ProbabilityOfNoCoactivation(r, r2, PROB) :- BrainRegionActive(r, study) // (LPFCRegionNotActive(r2, study) & SelectedStudy(study))\n",
    "    \n",
    "    MetaAnalyticConnectivityMatrix(r2, r, max(p1, p0)) :- ProbabilityOfCoactivation(r, r2, p1) & ProbabilityOfNoCoactivation(r, r2, p0)\n",
    "    \n",
    "    ans(r2, r, LOR) :-  MetaAnalyticConnectivityMatrix(r2, r, LOR)\n",
    "    \"\"\")"
   ]
  },
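  {
   "cell_type": "markdown",
   "id": "b7d1a10f",
   "metadata": {},
   "source": [
    "In the program above, `//` denotes a conditional probability: with `SelectedStudy` a uniform choice over studies, `ProbabilityOfCoactivation(r, r2, PROB)` is the fraction of studies activating LPFC region `r2` in which region `r` is also active. A plain-counting sketch of that semantics (a hypothetical helper over sets of study ids, not NeuroLang's implementation):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a110",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: P(region A active | region B active) as a ratio of study counts\n",
    "def coactivation_probability(studies_a: set, studies_b: set) -> float:\n",
    "    # studies_a / studies_b: study ids in which regions A / B are active (assumed inputs)\n",
    "    if not studies_b:\n",
    "        return 0.0\n",
    "    return len(studies_a & studies_b) / len(studies_b)\n",
    "\n",
    "print(coactivation_probability({1, 2, 3}, {2, 3, 4}))  # 2 of the 3 B-studies -> 0.666..."
   ]
  },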
  {
   "cell_type": "markdown",
   "id": "c9b69cae",
   "metadata": {},
   "source": [
    "## Estimating the Gradients Using Diffusion Embedding"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ec8989a1",
   "metadata": {},
   "outputs": [],
   "source": [
    "#compute similarity matrix using eta-squared metric and then derive gradients using PCA or DifussionEmbedding\n",
    "def eta2(X): \n",
    "    S = np.zeros((X.shape[0],X.shape[0]))\n",
    "    for i in range(0,X.shape[0]):\n",
    "        for j in range(i,X.shape[0]):\n",
    "            mi = np.mean([X[i,:],X[j,:]],0) \n",
    "            mm = np.mean(mi)\n",
    "            ssw = np.sum(np.square(X[i,:]-mi) + np.square(X[j,:]-mi))\n",
    "            sst = np.sum(np.square(X[i,:]-mm) + np.square(X[j,:]-mm))\n",
    "            S[i,j] = 1-ssw/sst\n",
    "    \n",
    "    S += S.T \n",
    "    S -= np.eye(S.shape[0])    \n",
    "    return S\n",
    "\n",
    "similarity_mat = eta2(connectivity_matrix.fillna(0).values)\n",
    "gradient_maps = GradientMaps(n_components=20, approach=\"dm\")\n",
    "gradient_maps.fit(similarity_mat, sparsity=0.0)\n",
    "\n",
    "grads = pd.DataFrame(gm.gradients_, index=coactivation_mat.index)\n",
    "grads_join = grads.join(region_voxels.set_index('region'))\n",
    "i, j, k = np.round(\n",
    "    nib.affines.apply_affine(\n",
    "        np.linalg.inv(mni_mask.affine), grads_join[[\"x\", \"y\", \"z\"]].values\n",
    ")).T.astype(int)\n",
    "\n",
    "gradient = np.zeros(mni_mask.shape)\n",
    "gradient[i, j, k] = pd.qcut(grads_join[0], 5, labels=np.arange(1, 6)) \n",
    "lpfc_mask = np.zeros_like(gradient)\n",
    "lpfc_mask[i, j, k] = 1\n",
    "gradient = nib.Nifti1Image(gradient, affine=mni_mask.affine)\n",
    "lpfc_mask = nib.Nifti1Image(lpfc_mask, affine=mni_mask.affine)\n",
    "\n",
    "plotting.plot_img_on_surf(gradient, mask_img=lpfc_mask, threshold=1, symmetric=False, cmap=\"gnuplot\")\n",
    "plt.show()\n",
    "\n",
    "plt.plot(gm.lambdas_ / gm.lambdas_.sum()); plt.show()"
   ]
  }
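,
  {
   "cell_type": "markdown",
   "id": "b7d1a111",
   "metadata": {},
   "source": [
    "A quick property check on `eta2` (a sketch on random data): the similarity matrix should come out symmetric with a unit diagonal, since for `i == j` the within-pair sum of squares is zero."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d1a112",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: sanity-check eta2 on random data (symmetry and unit diagonal)\n",
    "rng = np.random.default_rng(0)\n",
    "X = rng.normal(size=(5, 12))\n",
    "S = eta2(X)\n",
    "assert np.allclose(S, S.T), \"eta2 similarity should be symmetric\"\n",
    "assert np.allclose(np.diag(S), 1.0), \"self-similarity should be 1\"\n",
    "print(S.round(2))"
   ]
  }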
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}