diff --git a/01_latent_neural_states.ipynb b/01_latent_neural_states.ipynb index 0cc1455..e91ee09 100644 --- a/01_latent_neural_states.ipynb +++ b/01_latent_neural_states.ipynb @@ -9,9 +9,10 @@ "\n", "Let's analyze some neural data using popular dimensionality reduction methods.\n", "We will use the folloiwng methods with progressively better modeling assumptions.\n", + "\n", "- PCA (Principal Components Analysis)\n", " - Gaussian observation\n", - " - Independent identical gaussian noise per neuron\n", + " - Independent identical gaussian noise per neuron \n", "- GPFA (Gaussian Process Factor Analysis)\n", " - Gaussian observation\n", " - Unequal magnitude of noise per neuron\n", @@ -219,16 +220,16 @@ "outputs": [], "source": [ "#Choose dataset\n", - "use_data = 'monkey'\n", + "use_data = 'vanderpol'\n", "\n", "if use_data=='vanderpol':\n", " vdp_data = h5py.File(\"vanderpol/data/poisson_obs.h5\", 'r')\n", " y = np.array(vdp_data['Y'])\n", + " dt = 5e-3 # time bin size\n", " gaussian_filter_sigma = 12.0\n", " nTrial = vdp_data['X'].shape[0]\n", " nT = vdp_data['X'].shape[1]\n", " nNeuron = vdp_data['Y'].shape[2]\n", - " dt = 5e-3 # time bin size\n", " n_latent = 2\n", "\n", "elif use_data=='monkey':\n", @@ -239,8 +240,11 @@ " nTrial = m5['pos-train'].shape[0]\n", " nT = m5['pos-train'].shape[1]\n", " nNeuron = m5['spk-train'].shape[2]\n", " dt = 0.005 # 5 ms bin\n", " n_latent = n_latent_mc_maze\n", + " \n", + "else:\n", + " raise Exception(\"Typo?!\")\n", "\n", "T = dt * nT" ]