diff --git a/mlx/lora/finetune-experiments.ipynb b/mlx/lora/finetune-experiments.ipynb index c83d35976edb91ab0828bdbd59151604be1fea4e..b9733c321223379de6350f020308c1adeb10d58f 100644 --- a/mlx/lora/finetune-experiments.ipynb +++ b/mlx/lora/finetune-experiments.ipynb @@ -130,14 +130,34 @@ "id": "fd1a48e84474aaea" }, { - "cell_type": "code", - "execution_count": null, - "outputs": [], - "source": [], + "cell_type": "markdown", + "source": [ + "``` \n", + "$ python lora.py --model mlx_model/Mistral-7B-v0.1 --train --iters 600 --batch-size 1 --lora-layers 4\n", + "Loading pretrained model\n", + "Total parameters 1242.763M\n", + "Trainable parameters 0.426M\n", + "Loading datasets\n", + "Training\n", + "Iter 1: Val loss 1.805, Val took 93.856s\n", + "Iter 10: Train loss 1.275, It/sec 0.144, Tokens/sec 115.780\n", + "[WARNING] Some sequences are longer than 2048 tokens. Consider pre-splitting your data to save memory.\n", + "Iter 20: Train loss 1.052, It/sec 0.087, Tokens/sec 92.686\n", + "[WARNING] Some sequences are longer than 2048 tokens. Consider pre-splitting your data to save memory.\n", + "Iter 30: Train loss 1.230, It/sec 0.110, Tokens/sec 91.892\n", + "[WARNING] Some sequences are longer than 2048 tokens. Consider pre-splitting your data to save memory.\n", + "Iter 40: Train loss 1.032, It/sec 0.109, Tokens/sec 91.080\n", + "Iter 50: Train loss 0.977, It/sec 0.128, Tokens/sec 95.607\n", + "Iter 60: Train loss 1.021, It/sec 0.166, Tokens/sec 94.361\n", + "[WARNING] Some sequences are longer than 2048 tokens. Consider pre-splitting your data to save memory.\n", + "Iter 70: Train loss 1.077, It/sec 0.097, Tokens/sec 87.647\n", + "[WARNING] Some sequences are longer than 2048 tokens. 
Consider pre-splitting your data to save memory.\n", + "libc++abi: terminating due to uncaught exception of type std::runtime_error: [METAL] Command buffer execution failed: Insufficient Memory (00000008:kIOGPUCommandBufferCallbackErrorOutOfMemory)\n```\n" ], "metadata": { "collapsed": false }, - "id": "51b420d949a23c54" + "id": "7e10d007a2d411f0" } ], "metadata": {