diff --git a/mlx/lora/finetune-experiments.ipynb b/mlx/lora/finetune-experiments.ipynb
index 09b946a28f4baf89d3eb8b0b6bf367251fea88e4..7d7ab0e0f83d33d161a463cbbb9cde10080cf75a 100644
--- a/mlx/lora/finetune-experiments.ipynb
+++ b/mlx/lora/finetune-experiments.ipynb
@@ -142,6 +142,75 @@
    },
    "id": "a9dff0d6c779882c"
   },
+  {
+   "cell_type": "markdown",
+   "source": [
+    "### Set paths for model"
+   ],
+   "metadata": {
+    "collapsed": false
+   },
+   "id": "f9ba088a74f8c557"
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "HF_MODEL_PATH=mistralai/Mistral-7B-Instruct-v0.2\n",
+      "LOCAL_MODEL_PATH=mlx_models/mistralai/Mistral-7B-Instruct-v0.2\n"
+     ]
+    }
+   ],
+   "source": [
+    "import os\n",
+    "HF_MODEL_PATH = 'mistralai/Mistral-7B-Instruct-v0.2'\n",
+    "LOCAL_MODEL_PATH = f'mlx_models/{HF_MODEL_PATH}'\n",
+    "os.environ['HF_MODEL_PATH'] = HF_MODEL_PATH\n",
+    "os.environ['LOCAL_MODEL_PATH'] = LOCAL_MODEL_PATH\n",
+    "print(f\"\"\"\n",
+    "HF_MODEL_PATH={HF_MODEL_PATH}\n",
+    "LOCAL_MODEL_PATH={LOCAL_MODEL_PATH}\n",
+    "\"\"\".strip())\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2024-03-01T08:55:33.087209Z",
+     "start_time": "2024-03-01T08:55:33.080961Z"
+    }
+   },
+   "id": "203bf0c10dd860a5"
+  },
+  {
+   "cell_type": "markdown",
+   "source": [
+    "### Create a 4-Bit quantized model if necessary"
+   ],
+   "metadata": {
+    "collapsed": false
+   },
+   "id": "a52bdff5b0eae3bd"
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "outputs": [],
+   "source": [
+    "![ -d \"$LOCAL_MODEL_PATH\" ] || python convert.py --hf-path \"$HF_MODEL_PATH\" --mlx-path \"$LOCAL_MODEL_PATH\" -q"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2024-03-01T06:51:00.898588Z",
+     "start_time": "2024-03-01T06:51:00.764741Z"
+    }
+   },
+   "id": "fdb9ec6772be0c23"
+  },
   {
    "cell_type": "markdown",
    "source": [
@@ -208,7 +277,6 @@
    "metadata": {
     "collapsed": false,
     "ExecuteTime": {
-     "end_time": "2024-03-01T08:54:57.837466Z",
      "start_time": "2024-03-01T08:54:56.039305Z"
     }
    },
@@ -248,71 +316,11 @@
    "metadata": {
     "collapsed": false,
     "ExecuteTime": {
-     "end_time": "2024-03-01T08:54:59.410535Z",
      "start_time": "2024-03-01T08:54:59.402009Z"
     }
    },
    "id": "6181ba9486346975"
   },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "HF_MODEL_PATH=mistralai/Mistral-7B-Instruct-v0.2\n",
-      "LOCAL_MODEL_PATH=mlx_models/mistralai/Mistral-7B-Instruct-v0.2\n"
-     ]
-    }
-   ],
-   "source": [
-    "import os\n",
-    "HF_MODEL_PATH = 'mistralai/Mistral-7B-Instruct-v0.2'\n",
-    "LOCAL_MODEL_PATH = f'mlx_models/{HF_MODEL_PATH}'\n",
-    "os.environ['HF_MODEL_PATH'] = HF_MODEL_PATH\n",
-    "os.environ['LOCAL_MODEL_PATH'] = LOCAL_MODEL_PATH\n",
-    "print(f\"\"\"\n",
-    "HF_MODEL_PATH={HF_MODEL_PATH}\n",
-    "LOCAL_MODEL_PATH={LOCAL_MODEL_PATH}\n",
-    "\"\"\".strip())\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "ExecuteTime": {
-     "end_time": "2024-03-01T08:55:33.087209Z",
-     "start_time": "2024-03-01T08:55:33.080961Z"
-    }
-   },
-   "id": "203bf0c10dd860a5"
-  },
-  {
-   "cell_type": "markdown",
-   "source": [
-    "### Create a 4-Bit quantized model if necessary"
-   ],
-   "metadata": {
-    "collapsed": false
-   },
-   "id": "a52bdff5b0eae3bd"
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "outputs": [],
-   "source": [
-    "![ -d \"$LOCAL_MODEL_PATH\" ] || python convert.py --hf-path \"$HF_MODEL_PATH\" --mlx-path \"$LOCAL_MODEL_PATH\" -q"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "ExecuteTime": {
-     "end_time": "2024-03-01T06:51:00.898588Z",
-     "start_time": "2024-03-01T06:51:00.764741Z"
-    }
-   },
-   "id": "fdb9ec6772be0c23"
-  },
   {
    "cell_type": "markdown",
    "source": [