{"id":4455,"date":"2024-08-15T13:08:26","date_gmt":"2024-08-15T05:08:26","guid":{"rendered":"https:\/\/www.aqwu.net\/wp\/?p=4455"},"modified":"2024-08-15T20:00:16","modified_gmt":"2024-08-15T12:00:16","slug":"%e4%bd%bf%e7%94%a8-unsloth-%e8%b6%85%e9%ab%98%e6%95%88%e5%be%ae%e8%b0%83-llama-3-1","status":"publish","type":"post","link":"https:\/\/www.aqwu.net\/wp\/?p=4455","title":{"rendered":"\u4f7f\u7528 Unsloth \u8d85\u9ad8\u6548\u5fae\u8c03 Llama 3.1"},"content":{"rendered":"\n<p><em>\u6700\u5148\u8fdb\u7684\u76d1\u7763\u5fae\u8c03\u521d\u5b66\u8005\u6307\u5357<\/em><\/p>\n\n\n\n<p>\u6700\u8fd1\u53d1\u5e03\u7684 Llama 3.1 \u4e3a\u6a21\u578b\u63d0\u4f9b\u4e86\u4ee4\u4eba\u96be\u4ee5\u7f6e\u4fe1\u7684\u6027\u80fd\u6c34\u5e73\uff0c\u7f29\u5c0f\u4e86\u95ed\u6e90\u548c\u5f00\u6e90\u6a21\u578b\u4e4b\u95f4\u7684\u5dee\u8ddd\u3002\u60a8\u53ef\u4ee5\u9488\u5bf9\u7279\u5b9a\u7528\u4f8b\u5fae\u8c03 Llama 3.1\uff0c\u800c\u4e0d\u662f\u4f7f\u7528\u51bb\u7ed3\u7684\u901a\u7528 LLM\uff0c\u4f8b\u5982 GPT-4o \u548c Claude 3.5\uff0c\u4ee5\u66f4\u4f4e\u7684\u6210\u672c\u5b9e\u73b0\u66f4\u597d\u7684\u6027\u80fd\u548c\u53ef\u5b9a\u5236\u6027\u3002<\/p>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"705\" src=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-1024x705.png\" alt=\"\" class=\"wp-image-4456\" srcset=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-1024x705.png 1024w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-300x206.png 300w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-768x528.png 768w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-1536x1057.png 1536w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-2048x1409.png 2048w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-1320x908.png 1320w, 
https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-3-600x413.png 600w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p>\u5728\u672c\u6587\u4e2d\uff0c\u6211\u4eec\u5c06\u5bf9\u76d1\u7763\u5fae\u8c03\u8fdb\u884c\u5168\u9762\u6982\u8ff0\u3002\u6211\u4eec\u5c06\u5c06\u5176\u4e0e\u63d0\u793a\u5de5\u7a0b\u8fdb\u884c\u6bd4\u8f83\uff0c\u4ee5\u4e86\u89e3\u4f55\u65f6\u4f7f\u7528\u5b83\u6709\u610f\u4e49\uff0c \u8be6\u7ec6\u8bf4\u660e\u4e3b\u8981\u6280\u672f\u53ca\u5176\u4f18\u7f3a\u70b9\uff0c \u5e76\u4ecb\u7ecd\u4e3b\u8981\u6982\u5ff5\uff0c \u4f8b\u5982 LoRA \u8d85\u53c2\u6570\uff0c \u5b58\u50a8\u683c\u5f0f\uff0c \u548c\u804a\u5929\u6a21\u677f.\u6700\u540e\uff0c\u6211\u4eec\u5c06\u901a\u8fc7\u5728 Google Colab \u4e2d\u4f7f\u7528 Unsloth \u8fdb\u884c\u6700\u5148\u8fdb\u7684\u4f18\u5316\u6765\u5fae\u8c03 Llama 3.1 8B \u6765\u5728\u5b9e\u8df5\u4e2d\u5b9e\u73b0\u5b83\u3002<\/p>\n\n\n\n<p>\u672c\u6587\u4e2d\u4f7f\u7528\u7684\u6240\u6709\u4ee3\u7801\u90fd\u53ef\u4ee5\u5728&nbsp;<a href=\"https:\/\/colab.research.google.com\/drive\/164cg_O7SV7G8kZr_JXqLd6VC7pd86-1Z#scrollTo=PoPKQjga6obN\">Google Colab<\/a>&nbsp;\u548c&nbsp;<a href=\"https:\/\/github.com\/mlabonne\/llm-course\">LLM \u8bfe\u7a0b<\/a>\u4e2d\u627e\u5230\u3002\u7279\u522b\u611f\u8c22 Daniel Han \u56de\u7b54\u6211\u7684\u95ee\u9898\u3002<\/p>\n\n\n\n<h2 class=\"wp-block-heading\"><a href=\"https:\/\/huggingface.co\/blog\/mlabonne\/sft-llama3#%F0%9F%94%A7-supervised-fine-tuning\"><\/a>\ud83d\udd27 \u76d1\u7763\u5fae\u8c03<\/h2>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"333\" src=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-1024x333.png\" alt=\"\" class=\"wp-image-4457\" srcset=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-1024x333.png 1024w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-300x98.png 300w, 
https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-768x250.png 768w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-1536x499.png 1536w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-1320x429.png 1320w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4-600x195.png 600w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-4.png 1818w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p>\u76d1\u7763\u5fae\u8c03 \uff08SFT\uff09 \u662f\u4e00\u79cd<strong>\u6539\u8fdb\u548c\u5b9a\u5236<\/strong>\u9884\u8bad\u7ec3 LLM \u7684\u65b9\u6cd5\u3002\u5b83\u6d89\u53ca\u5728\u8f83\u5c0f\u7684\u6307\u4ee4\u548c\u7b54\u6848\u6570\u636e\u96c6\u4e0a\u91cd\u65b0\u8bad\u7ec3\u57fa\u7840\u6a21\u578b\u3002\u4e3b\u8981\u76ee\u6807\u662f\u5c06\u9884\u6d4b\u6587\u672c\u7684\u57fa\u672c\u6a21\u578b\u8f6c\u6362\u4e3a\u53ef\u4ee5\u9075\u5faa\u6307\u793a\u5e76\u56de\u7b54\u95ee\u9898\u7684\u52a9\u624b\u3002SFT \u8fd8\u53ef\u4ee5\u589e\u5f3a\u6a21\u578b\u7684\u6574\u4f53\u6027\u80fd\u3001\u6dfb\u52a0\u65b0\u77e5\u8bc6\u6216\u4f7f\u5176\u9002\u5e94\u7279\u5b9a\u4efb\u52a1\u548c\u9886\u57df\u3002\u7136\u540e\uff0c\u5fae\u8c03\u7684\u6a21\u578b\u53ef\u4ee5\u7ecf\u5386\u4e00\u4e2a\u53ef\u9009\u7684\u504f\u597d\u5bf9\u9f50\u9636\u6bb5\uff08\u8bf7\u53c2\u9605<a href=\"https:\/\/mlabonne.github.io\/blog\/posts\/Fine_tune_Mistral_7b_with_DPO.html\">\u6211\u5173\u4e8e DPO \u7684\u6587\u7ae0<\/a>\uff09\uff0c\u4ee5\u5220\u9664\u4e0d\u9700\u8981\u7684\u54cd\u5e94\u3001\u4fee\u6539\u5176\u6837\u5f0f\u7b49\u3002<\/p>\n\n\n\n<p>\u4e0b\u56fe\u663e\u793a\u4e86\u4e00\u4e2a\u6307\u4ee4\u793a\u4f8b\u3002\u5b83\u5305\u62ec\u7528\u4e8e\u5f15\u5bfc\u6a21\u578b\u7684\u7cfb\u7edf\u63d0\u793a\u3001\u7528\u4e8e\u63d0\u4f9b\u4efb\u52a1\u7684\u7528\u6237\u63d0\u793a\u4ee5\u53ca\u6a21\u578b\u9884\u671f\u751f\u6210\u7684\u8f93\u51fa\u3002\u60a8\u53ef\u4ee5\u5728 LLM \u6570\u636e\u96c6 
GitHub \u5b58\u50a8\u5e93\u4e2d\u627e\u5230<a href=\"https:\/\/github.com\/mlabonne\/llm-datasets\">\ud83d\udcbe<\/a>\u9ad8\u8d28\u91cf\u7684\u5f00\u6e90\u6307\u4ee4\u6570\u636e\u96c6\u5217\u8868\u3002<\/p>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"269\" src=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-1024x269.png\" alt=\"\" class=\"wp-image-4458\" srcset=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-1024x269.png 1024w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-300x79.png 300w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-768x202.png 768w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-1536x404.png 1536w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-2048x539.png 2048w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-1320x347.png 1320w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-5-600x158.png 600w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p>\u5728\u8003\u8651 SFT \u4e4b\u524d\uff0c\u6211\u5efa\u8bae\u5c1d\u8bd5\u63d0\u793a\u5de5\u7a0b\u6280\u672f\uff0c\u4f8b\u5982<strong>\u5c11\u6837\u672c\u63d0\u793a<\/strong>\u6216<strong>\u68c0\u7d22\u589e\u5f3a\u751f\u6210<\/strong>&nbsp;\uff08RAG\uff09\u3002\u5728\u5b9e\u8df5\u4e2d\uff0c\u8fd9\u4e9b\u65b9\u6cd5\u53ef\u4ee5\u89e3\u51b3\u8bb8\u591a\u95ee\u9898\uff0c\u800c\u65e0\u9700\u5fae\u8c03\uff0c\u4f7f\u7528\u95ed\u6e90\u6216\u5f00\u653e\u6743\u91cd\u6a21\u578b\uff08\u4f8b\u5982\uff0cLlama 3.1 Instruct\uff09\u3002\u5982\u679c\u8fd9\u79cd\u65b9\u6cd5\u4e0d\u80fd\u6ee1\u8db3\u60a8\u7684\u76ee\u6807\uff08\u5728\u8d28\u91cf\u3001\u6210\u672c\u3001\u5ef6\u8fdf\u7b49\u65b9\u9762\uff09\uff0c\u90a3\u4e48\u5f53\u6307\u4ee4\u6570\u636e\u53ef\u7528\u65f6\uff0cSFT 
\u5c31\u6210\u4e3a\u4e00\u4e2a\u53ef\u884c\u7684\u9009\u62e9\u3002\u8bf7\u6ce8\u610f\uff0cSFT \u8fd8\u63d0\u4f9b\u989d\u5916\u7684\u63a7\u5236\u548c\u53ef\u5b9a\u5236\u6027\u7b49\u4f18\u52bf\uff0c\u4ee5\u521b\u5efa\u4e2a\u6027\u5316\u7684 LLM\u3002<\/p>\n\n\n\n<p>\u4f46\u662f\uff0cSFT \u6709\u5c40\u9650\u6027\u3002\u5728\u5229\u7528\u57fa\u7840\u6a21\u578b\u4e2d\u5df2\u6709\u7684\u77e5\u8bc6\u65f6\uff0c\u5b83\u6548\u679c\u6700\u597d\u3002\u5b66\u4e60\u5168\u65b0\u7684\u4fe1\u606f\uff08\u5982\u672a\u77e5\u8bed\u8a00\uff09\u53ef\u80fd\u5177\u6709\u6311\u6218\u6027\uff0c\u5e76\u5bfc\u81f4\u66f4\u9891\u7e41\u7684\u5e7b\u89c9\u3002\u5bf9\u4e8e\u57fa\u7840\u6a21\u578b\u672a\u77e5\u7684\u65b0\u57df\uff0c\u5efa\u8bae\u5148\u5728\u539f\u59cb\u6570\u636e\u96c6\u4e0a\u6301\u7eed\u9884\u8bad\u7ec3\u5b83\u3002<\/p>\n\n\n\n<p>\u53e6\u4e00\u65b9\u9762\uff0c\u6307\u4ee4\u6a21\u578b\uff08\u5373\u5df2\u7ecf\u5fae\u8c03\u7684\u6a21\u578b\uff09\u5df2\u7ecf\u975e\u5e38\u63a5\u8fd1\u60a8\u7684\u9700\u6c42\u3002\u4f8b\u5982\uff0c\u4e00\u4e2a\u6a21\u578b\u53ef\u80fd\u8868\u73b0\u975e\u5e38\u597d\uff0c\u4f46\u58f0\u660e\u5b83\u662f\u7531 OpenAI \u6216 Meta \u800c\u4e0d\u662f\u60a8\u8bad\u7ec3\u7684\u3002\u5728\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u60a8\u53ef\u80fd\u5e0c\u671b\u4f7f\u7528\u504f\u597d\u5bf9\u9f50\u6765\u7a0d\u5fae\u5f15\u5bfc\u6307\u793a\u6a21\u578b\u7684\u884c\u4e3a\u3002\u901a\u8fc7\u4e3a\u4e00\u5c0f\u7ec4\u6307\u4ee4\uff08100 \u5230 1000 \u4e2a\u6837\u672c\uff09\u63d0\u4f9b\u9009\u62e9\u548c\u62d2\u7edd\u7684\u6837\u672c\uff0c\u60a8\u53ef\u4ee5\u5f3a\u5236 LLM \u8bf4\u60a8\u8bad\u7ec3\u4e86\u5b83\u800c\u4e0d\u662f OpenAI\u3002<\/p>\n\n\n\n<h2 class=\"wp-block-heading\"><a href=\"https:\/\/huggingface.co\/blog\/mlabonne\/sft-llama3#%E2%9A%96%EF%B8%8F-sft-techniques\"><\/a>\u2696\ufe0f SFT\u6280\u672f<\/h2>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"432\" 
src=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-1024x432.png\" alt=\"\" class=\"wp-image-4459\" srcset=\"https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-1024x432.png 1024w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-300x127.png 300w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-768x324.png 768w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-1536x649.png 1536w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-1320x557.png 1320w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6-600x253.png 600w, https:\/\/www.aqwu.net\/wp\/wp-content\/uploads\/2024\/08\/\u56fe\u7247-6.png 1582w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p>\u4e09\u79cd\u6700\u6d41\u884c\u7684 SFT \u6280\u672f\u662f\u5b8c\u5168\u5fae\u8c03\u3001LoRA \u548c QLoRA\u3002<\/p>\n\n\n\n<p><strong>\u5b8c\u5168\u5fae\u8c03<\/strong>\u662f\u6700\u76f4\u63a5\u7684 SFT \u6280\u672f\u3002\u5b83\u6d89\u53ca\u5728\u6307\u4ee4\u6570\u636e\u96c6\u4e0a\u91cd\u65b0\u8bad\u7ec3\u9884\u8bad\u7ec3\u6a21\u578b\u7684\u6240\u6709\u53c2\u6570\u3002\u8fd9\u79cd\u65b9\u6cd5\u901a\u5e38\u63d0\u4f9b\u6700\u4f73\u7ed3\u679c\uff0c\u4f46\u9700\u8981\u5927\u91cf\u7684\u8ba1\u7b97\u8d44\u6e90\uff08\u9700\u8981\u51e0\u4e2a\u9ad8\u7aef GPU \u6765\u5fae\u8c03 8B \u6a21\u578b\uff09\u3002\u56e0\u4e3a\u5b83\u4fee\u6539\u4e86\u6574\u4e2a\u6a21\u578b\uff0c\u6240\u4ee5\u5b83\u4e5f\u662f\u6700\u5177\u7834\u574f\u6027\u7684\u65b9\u6cd5\uff0c\u5e76\u53ef\u80fd\u5bfc\u81f4\u707e\u96be\u6027\u5730\u5fd8\u8bb0\u4ee5\u524d\u7684\u6280\u80fd\u548c\u77e5\u8bc6\u3002<\/p>\n\n\n\n<p><strong>\u4f4e\u79e9\u81ea\u9002\u5e94 
\uff08LoRA\uff09<\/strong>&nbsp;\u662f\u4e00\u79cd\u6d41\u884c\u7684\u53c2\u6570\u9ad8\u6548\u5fae\u8c03\u6280\u672f\u3002\u5b83\u4e0d\u662f\u91cd\u65b0\u8bad\u7ec3\u6574\u4e2a\u6a21\u578b\uff0c\u800c\u662f\u51bb\u7ed3\u6743\u91cd\u5e76\u5728\u6bcf\u4e2a\u76ee\u6807\u5c42\u5f15\u5165\u5c0f\u9002\u914d\u5668\uff08\u4f4e\u79e9\u77e9\u9635\uff09\u3002\u8fd9\u4f7f\u5f97 LoRA \u80fd\u591f\u8bad\u7ec3\u8bb8\u591a\u53c2\u6570\uff0c\u8fd9\u4e9b\u53c2\u6570\u5927\u5927\u4f4e\u4e8e\u5b8c\u5168\u5fae\u8c03 \uff08\u5c0f\u4e8e 1%\uff09\uff0c \u51cf\u5c11\u4e86\u5185\u5b58\u4f7f\u7528\u548c\u8bad\u7ec3\u65f6\u95f4.\u8fd9\u79cd\u65b9\u6cd5\u662f\u975e\u7834\u574f\u6027\u7684\uff0c\u56e0\u4e3a\u539f\u59cb\u53c2\u6570\u88ab\u51bb\u7ed3\uff0c\u7136\u540e\u53ef\u4ee5\u968f\u610f\u5207\u6362\u6216\u7ec4\u5408\u9002\u914d\u5668\u3002<\/p>\n\n\n\n<p><strong>QLoRA\uff08\u91cf\u5316\u611f\u77e5\u4f4e\u79e9\u9002\u5e94\uff09<\/strong>\u662f LoRA \u7684\u6269\u5c55\uff0c\u53ef\u8282\u7701\u66f4\u5927\u7684\u5185\u5b58\u3002\u4e0e\u6807\u51c6 LoRA \u76f8\u6bd4\uff0c\u5b83\u53ef\u989d\u5916\u51cf\u5c11\u9ad8\u8fbe 33% \u7684\u5185\u5b58\uff0c\u56e0\u6b64\u5728 GPU \u5185\u5b58\u53d7\u9650\u65f6\u7279\u522b\u6709\u7528\u3002\u8fd9\u79cd\u6548\u7387\u7684\u63d0\u9ad8\u662f\u4ee5\u66f4\u957f\u7684\u8bad\u7ec3\u65f6\u95f4\u4e3a\u4ee3\u4ef7\u7684\uff0cQLoRA \u7684\u8bad\u7ec3\u65f6\u95f4\u901a\u5e38\u6bd4\u5e38\u89c4 LoRA \u591a 39%\u3002<\/p>\n\n\n\n<p>\u867d\u7136 QLoRA \u9700\u8981\u66f4\u591a\u7684\u8bad\u7ec3\u65f6\u95f4\uff0c\u4f46\u5176\u5927\u91cf\u5185\u5b58\u8282\u7701\u53ef\u4ee5\u4f7f\u5176\u6210\u4e3a GPU \u5185\u5b58\u6709\u9650\u7684\u573a\u666f\u4e2d\u552f\u4e00\u53ef\u884c\u7684\u9009\u62e9\u3002\u51fa\u4e8e\u8fd9\u4e2a\u539f\u56e0\uff0c\u6211\u4eec\u5c06\u5728\u4e0b\u4e00\u8282\u4e2d\u4f7f\u7528\u8fd9\u79cd\u6280\u672f\u6765\u5fae\u8c03 Google Colab \u4e0a\u7684 Llama 3.1 8B \u6a21\u578b\u3002<\/p>\n\n\n\n<h2 class=\"wp-block-heading\"><a 
href=\"https:\/\/huggingface.co\/blog\/mlabonne\/sft-llama3#%F0%9F%A6%99-fine-tune-llama-31-8b\"><\/a>\ud83e\udd99 \u5fae\u8c03 Llama 3.1 8B<\/h2>\n\n\n\n<p>\u4e3a\u4e86\u6709\u6548\u5730\u5fae\u8c03&nbsp;<a href=\"https:\/\/huggingface.co\/meta-llama\/Meta-Llama-3.1-8B\">Llama 3.1 8B<\/a>&nbsp;\u6a21\u578b\uff0c\u6211\u4eec\u5c06\u4f7f\u7528 Daniel \u548c Michael Han \u7684&nbsp;<a href=\"https:\/\/github.com\/unslothai\/unsloth\">Unsloth<\/a>&nbsp;\u5e93\u3002\u5f97\u76ca\u4e8e\u5176\u81ea\u5b9a\u4e49\u5185\u6838\uff0c\u4e0e\u5176\u4ed6\u9009\u9879\u76f8\u6bd4\uff0cUnsloth \u7684\u8bad\u7ec3\u901f\u5ea6\u63d0\u9ad8\u4e86 2 \u500d\uff0c\u5185\u5b58\u4f7f\u7528\u7387\u63d0\u9ad8\u4e86 60%\uff0c\u975e\u5e38\u9002\u5408 Colab \u7b49\u53d7\u9650\u73af\u5883\u3002\u4e0d\u5e78\u7684\u662f\uff0cUnsloth \u76ee\u524d\u4ec5\u652f\u6301\u5355 GPU \u8bbe\u7f6e\u3002\u5bf9\u4e8e\u591a GPU \u8bbe\u7f6e\uff0c\u6211\u63a8\u8350\u6d41\u884c\u7684\u66ff\u4ee3\u54c1\uff0c\u5982&nbsp;<a href=\"https:\/\/huggingface.co\/docs\/trl\/en\/index\">TRL<\/a>&nbsp;\u548c&nbsp;<a href=\"https:\/\/github.com\/OpenAccess-AI-Collective\/axolotl\">Axolotl<\/a>\uff08\u4e24\u8005\u90fd\u5305\u62ec Unsloth \u4f5c\u4e3a\u540e\u7aef\uff09\u3002<\/p>\n\n\n\n<p>\u5728\u6b64\u793a\u4f8b\u4e2d\uff0c\u6211\u4eec\u5c06\u5728&nbsp;<a href=\"https:\/\/huggingface.co\/datasets\/mlabonne\/FineTome-100k\">mlabonne\/FineTome-100k<\/a>&nbsp;\u6570\u636e\u96c6\u4e0a\u5bf9\u5176\u8fdb\u884c QLoRA \u5fae\u8c03\u3002\u8fd9\u662f\u6211\u4f7f\u7528&nbsp;<a href=\"https:\/\/huggingface.co\/HuggingFaceFW\/fineweb-edu-classifier\">HuggingFaceFW\/fineweb-edu-classifier<\/a>&nbsp;\u91cd\u65b0\u8fc7\u6ee4\u7684&nbsp;<a href=\"https:\/\/huggingface.co\/datasets\/arcee-ai\/The-Tome\">arcee-ai\/The-Tome<\/a>\uff08\u6ca1\u6709&nbsp;<a 
href=\"https:\/\/huggingface.co\/datasets\/arcee-ai\/qwen2-72b-magpie-en\">arcee-ai\/qwen2-72b-magpie-en<\/a>\uff09\u7684\u5b50\u96c6\u3002\u8bf7\u6ce8\u610f\uff0c\u6b64\u5206\u7c7b\u5668\u4e0d\u662f\u4e3a\u6307\u4ee4\u6570\u636e\u8d28\u91cf\u8bc4\u4f30\u800c\u8bbe\u8ba1\u7684\uff0c\u4f46\u6211\u4eec\u53ef\u4ee5\u5c06\u5176\u7528\u4f5c\u7c97\u7565\u7684\u4ee3\u7406\u3002\u7531\u6b64\u4ea7\u751f\u7684 FineTome \u662f\u4e00\u4e2a\u8d85\u9ad8\u8d28\u91cf\u7684\u6570\u636e\u96c6\uff0c\u5305\u62ec\u5bf9\u8bdd\u3001\u63a8\u7406\u95ee\u9898\u3001\u51fd\u6570\u8c03\u7528\u7b49\u3002<\/p>\n\n\n\n<p>\u8ba9\u6211\u4eec\u5148\u521b\u5efa\u73af\u5883\uff0c\u6d4b\u8bd5\u73af\u5883\u4e3a WSL2,Ubuntu 22.04.3 LTS\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \" >conda create -yn unsloth python=3.10\nconda activate unsloth\n<\/pre><\/div>\n\n\n\n<p>\u5b89\u88c5\u6240\u6709\u5fc5\u9700\u7684\u5e93\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:sh decode:true \">pip install \"unsloth[colab-new] @ git+https:\/\/github.com\/unslothai\/unsloth.git\"\npip install --no-deps \"xformers&lt;0.0.27\" \"trl&lt;0.9.0\" peft accelerate bitsandbytes\npip install torch==2.3.0+cu121 torchvision torchaudio --index-url https:\/\/download.pytorch.org\/whl\/cu121\npip install numpy==1.24.4\n<\/pre><\/div>\n\n\n\n<p>\u5b89\u88c5\u540e\uff0c\u6211\u4eec\u53ef\u4ee5\u6309\u4ee5\u4e0b\u6b65\u9aa4\u5bfc\u5165\u5b83\u4eec\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">import torch\nfrom trl import SFTTrainer\nfrom datasets import load_dataset\nfrom transformers import TrainingArguments, TextStreamer\nfrom unsloth.chat_templates import get_chat_template\nfrom unsloth import FastLanguageModel, 
is_bfloat16_supported\n<\/pre><\/div>\n\n\n\n<p>\u73b0\u5728\u8ba9\u6211\u4eec\u52a0\u8f7d\u6a21\u578b\u3002\u7531\u4e8e\u6211\u4eec\u60f3\u4f7f\u7528 QLoRA\uff0c\u6211\u9009\u62e9\u4e86\u9884\u91cf\u5316\u7684&nbsp;<a href=\"https:\/\/huggingface.co\/unsloth\/Meta-Llama-3.1-8B-bnb-4bit\">unsloth\/Meta-Llama-3.1-8B-bnb-4bit<\/a>\u3002\u4e0e\u539f\u59cb\u7684 16 \u4f4d\u7cbe\u5ea6\u6a21\u578b \uff0816 GB\uff09 \u76f8\u6bd4\uff0c\u8fd9\u6b3e&nbsp;<a href=\"https:\/\/huggingface.co\/blog\/mlabonne\/meta-llama\/Meta-Llama-3.1-8B\">meta-llama\/Meta-Llama-3.1-8B<\/a>&nbsp;\u7684 4 \u4f4d\u7cbe\u5ea6\u7248\u672c\u660e\u663e\u66f4\u5c0f \uff085.4 GB\uff09\uff0c\u4e0b\u8f7d\u901f\u5ea6\u66f4\u5feb\u3002\u6211\u4eec\u4f7f\u7528 bitsandbytes \u5e93\u4ee5 NF4 \u683c\u5f0f\u52a0\u8f7d\u3002<\/p>\n\n\n\n<p>\u52a0\u8f7d\u6a21\u578b\u65f6\uff0c\u6211\u4eec\u5fc5\u987b\u6307\u5b9a\u6700\u5927\u5e8f\u5217\u957f\u5ea6\uff0c\u8fd9\u4f1a\u9650\u5236\u5176\u4e0a\u4e0b\u6587\u7a97\u53e3\u3002Llama 3.1 \u652f\u6301\u9ad8\u8fbe 128k \u7684\u4e0a\u4e0b\u6587\u957f\u5ea6\uff0c\u4f46\u5728\u6b64\u793a\u4f8b\u4e2d\uff0c\u6211\u4eec\u5c06\u5c06\u5176\u8bbe\u7f6e\u4e3a 2,048\uff0c\u56e0\u4e3a\u5b83\u6d88\u8017\u66f4\u591a\u7684\u8ba1\u7b97\u548c VRAM\u3002\u6700\u540e\uff0c<code>dtype<\/code>&nbsp;\u53c2\u6570\u4f1a\u81ea\u52a8\u68c0\u6d4b\u60a8\u7684 GPU \u662f\u5426\u652f\u6301&nbsp;<a href=\"https:\/\/mlabonne.github.io\/blog\/posts\/Introduction_to_Weight_Quantization.html#background-on-floating-point-representation\">BF16 \u683c\u5f0f<\/a>\uff0c\u4ee5\u4fbf\u5728\u8bad\u7ec3\u671f\u95f4\u63d0\u9ad8\u7a33\u5b9a\u6027\uff08\u6b64\u529f\u80fd\u4ec5\u9650\u4e8e Ampere \u548c\u66f4\u65b0\u7684 GPU\uff09\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">max_seq_length = 2048\nmodel, tokenizer = FastLanguageModel.from_pretrained(\n    model_name=\"unsloth\/Meta-Llama-3.1-8B-bnb-4bit\",\n    max_seq_length=max_seq_length,\n    
load_in_4bit=True,\n    dtype=None,\n)\n<\/pre><\/div>\n\n\n\n<p>\u73b0\u5728\u6211\u4eec\u7684\u6a21\u578b\u5df2\u4ee5 4 \u4f4d\u7cbe\u5ea6\u52a0\u8f7d\uff0c\u6211\u4eec\u5e0c\u671b\u4e3a\u4f7f\u7528 LoRA \u9002\u914d\u5668\u8fdb\u884c\u53c2\u6570\u9ad8\u6548\u5fae\u8c03\u505a\u597d\u51c6\u5907\u3002LoRA \u6709\u4e09\u4e2a\u91cd\u8981\u53c2\u6570\uff1a<\/p>\n\n\n\n<ul class=\"wp-block-list\">\n<li><strong>Rank&nbsp;(r)<\/strong>\uff0c \u786e\u5b9a LoRA \u77e9\u9635\u5927\u5c0f.\u7b49\u7ea7\u901a\u5e38\u4ece 8 \u5f00\u59cb\uff0c\u4f46\u53ef\u4ee5\u4e0a\u5347\u5230 256\u3002\u66f4\u9ad8\u7684\u7b49\u7ea7\u53ef\u4ee5\u5b58\u50a8\u66f4\u591a\u7684\u4fe1\u606f\uff0c\u4f46\u4f1a\u589e\u52a0 LoRA \u7684\u8ba1\u7b97\u548c\u5185\u5b58\u6210\u672c.\u6211\u4eec\u5728\u8fd9\u91cc\u5c06\u5176\u8bbe\u7f6e\u4e3a 16\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>Alpha&nbsp;\uff08\u03b1\uff09<\/strong>\uff0c\u66f4\u65b0\u7684\u6bd4\u4f8b\u56e0\u5b50\u3002Alpha \u76f4\u63a5\u5f71\u54cd\u9002\u914d\u5668\u7684\u8d21\u732e\uff0c\u901a\u5e38\u8bbe\u7f6e\u4e3a <strong>Rank<\/strong> \u503c\u7684 1 \u500d\u6216 2 \u500d\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>\u76ee\u6807\u6a21\u5757<\/strong>\uff1a LoRA\u53ef\u5e94\u7528\u4e8e\u5404\u79cd\u6a21\u578b\u7ec4\u4ef6\uff0c \u5305\u62ec\u6ce8\u610f\u529b\u673a\u5236 \uff08Q\uff0c K\uff0c V\u77e9\u9635\uff09\uff0c \u8f93\u51fa\u6295\u5f71\uff0c \u524d\u9988\u5757\uff0c \u548c\u7ebf\u6027\u8f93\u51fa\u5c42.\u867d\u7136\u6700\u521d\u4e13\u6ce8\u4e8e\u6ce8\u610f\u529b\u673a\u5236\uff0c \u5c06 LoRA \u6269\u5c55\u5230\u5176\u4ed6\u7ec4\u4ef6\u5df2\u7ecf\u663e\u793a\u51fa\u597d\u5904.\u4f46\u662f\uff0c\u9002\u914d\u66f4\u591a\u6a21\u5757\u4f1a\u589e\u52a0\u53ef\u8bad\u7ec3\u53c2\u6570\u7684\u6570\u91cf\u548c\u5185\u5b58\u9700\u6c42\u3002<\/li>\n<\/ul>\n\n\n\n<p>\u5728\u8fd9\u91cc\uff0c\u6211\u4eec\u8bbe\u7f6e 
r=16\uff0c\u03b1=16\uff0c\u5e76\u9488\u5bf9\u6bcf\u4e2a\u7ebf\u6027\u6a21\u5757\u4ee5\u6700\u5927\u9650\u5ea6\u5730\u63d0\u9ad8\u8d28\u91cf\u3002\u6211\u4eec\u4e0d\u4f7f\u7528\u8f8d\u5b66\u548c\u504f\u89c1\u6765\u52a0\u5feb\u8bad\u7ec3\u901f\u5ea6\u3002<\/p>\n\n\n\n<p>\u6b64\u5916\uff0c\u6211\u4eec\u5c06\u4f7f\u7528<a href=\"https:\/\/arxiv.org\/abs\/2312.03732\">\u79e9\u7a33\u5b9a LoRA<\/a>&nbsp;\uff08rsLoRA\uff09\uff0c\u5b83\u5c06 LoRA \u9002\u914d\u5668\u7684\u6bd4\u4f8b\u56e0\u5b50\u4fee\u6539\u4e3a 1\/\u221ar \u800c\u4e0d\u662f 1\/r\u3002\u8fd9\u53ef\u4ee5\u7a33\u5b9a\u5b66\u4e60\uff08\u7279\u522b\u662f\u5bf9\u4e8e\u66f4\u9ad8\u7684\u9002\u914d\u5668\u7b49\u7ea7\uff09\uff0c\u5e76\u5141\u8bb8\u968f\u7740\u7b49\u7ea7\u7684\u589e\u52a0\u63d0\u9ad8\u5fae\u8c03\u6027\u80fd\u3002\u68af\u5ea6\u68c0\u67e5\u70b9\u7531 Unsloth \u5904\u7406\uff0c\u7528\u4e8e\u5c06\u8f93\u5165\u548c\u8f93\u51fa\u5d4c\u5165\u5378\u8f7d\u5230\u78c1\u76d8\u5e76\u8282\u7701 VRAM\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">model = FastLanguageModel.get_peft_model(\n    model,\n    r=16,\n    lora_alpha=16,\n    lora_dropout=0,\n    target_modules=[\"q_proj\", \"k_proj\", \"v_proj\", \"up_proj\", \"down_proj\", \"o_proj\", \"gate_proj\"], \n    use_rslora=True,\n    use_gradient_checkpointing=\"unsloth\"\n)\n<\/pre><\/div>\n\n\n\n<p>\u4f7f\u7528\u6b64 LoRA \u914d\u7f6e\uff0c \u6211\u4eec\u53ea\u4f1a\u8bad\u7ec3 80 \u4ebf\u4e2a\u53c2\u6570\u4e2d\u7684 4200 \u4e07\u4e2a \uff080.5196%\uff09\u3002\u8fd9\u8868\u660e\u4e0e\u5b8c\u5168\u5fae\u8c03\u76f8\u6bd4\uff0cLoRA \u7684\u6548\u7387\u8981\u9ad8\u5f97\u591a.<\/p>\n\n\n\n<p>\u73b0\u5728\u8ba9\u6211\u4eec\u52a0\u8f7d\u5e76\u51c6\u5907\u6211\u4eec\u7684\u6570\u636e\u96c6\u3002\u6307\u4ee4\u6570\u636e\u96c6\u4ee5<strong>\u7279\u5b9a\u683c\u5f0f<\/strong>\u5b58\u50a8\uff1a\u5b83\u53ef\u4ee5\u662f Alpaca\u3001ShareGPT\u3001OpenAI 
\u7b49\u3002\u9996\u5148\uff0c\u6211\u4eec\u60f3\u8981\u89e3\u6790\u6b64\u683c\u5f0f\u4ee5\u68c0\u7d22\u6211\u4eec\u7684\u6307\u4ee4\u548c\u7b54\u6848\u3002\u6211\u4eec\u7684&nbsp;<a href=\"https:\/\/huggingface.co\/datasets\/mlabonne\/FineTome-100k\">mlabonne\/FineTome-100k<\/a>&nbsp;\u6570\u636e\u96c6\u4f7f\u7528 ShareGPT \u683c\u5f0f\uff0c\u5e26\u6709\u4e00\u4e2a\u72ec\u7279\u7684\u201c\u5bf9\u8bdd\u201d\u5217\uff0c\u5176\u4e2d\u5305\u542b JSONL \u4e2d\u7684\u6d88\u606f\u3002\u4e0e Alpaca \u7b49\u66f4\u7b80\u5355\u7684\u683c\u5f0f\u4e0d\u540c\uff0cShareGPT \u975e\u5e38\u9002\u5408\u5b58\u50a8\u591a\u8f6e\u5bf9\u8bdd\uff0c\u8fd9\u66f4\u63a5\u8fd1\u7528\u6237\u4e0e LLM \u7684\u4ea4\u4e92\u65b9\u5f0f\u3002<\/p>\n\n\n\n<p>\u4e00\u65e6\u6211\u4eec\u7684\u6307\u4ee4-\u7b54\u6848\u5bf9\u88ab\u89e3\u6790\uff0c\u6211\u4eec\u5c31\u60f3\u91cd\u65b0\u683c\u5f0f\u5316\u5b83\u4eec\u4ee5\u9075\u5faa<strong>\u804a\u5929\u6a21\u677f<\/strong>\u3002\u804a\u5929\u6a21\u677f\u662f\u4e00\u79cd\u6784\u5efa\u7528\u6237\u548c\u6a21\u578b\u4e4b\u95f4\u5bf9\u8bdd\u7684\u65b9\u6cd5\u3002\u5b83\u4eec\u901a\u5e38\u5305\u62ec\u7279\u6b8a\u4ee4\u724c\uff0c\u7528\u4e8e\u8bc6\u522b\u6d88\u606f\u7684\u5f00\u5934\u548c\u7ed3\u5c3e\u3001\u8c01\u5728\u8bf4\u8bdd\u7b49\u3002\u57fa\u7840\u6a21\u578b\u6ca1\u6709\u804a\u5929\u6a21\u677f\uff0c\u56e0\u6b64\u6211\u4eec\u53ef\u4ee5\u9009\u62e9\u4efb\u4f55\u6a21\u677f\uff1aChatML\u3001Llama3\u3001Mistral \u7b49\u3002\u5728\u5f00\u6e90\u793e\u533a\u4e2d\uff0cChatML \u6a21\u677f\uff08\u6700\u521d\u6765\u81ea 
OpenAI\uff09\u662f\u4e00\u4e2a\u6d41\u884c\u7684\u9009\u9879\u3002\u5b83\u53ea\u662f\u6dfb\u52a0\u4e24\u4e2a\u7279\u6b8a\u6807\u8bb0\uff08<code>&lt;|im_start|&gt;<\/code>&nbsp;\u548c&nbsp;<code>&lt;|im_end|&gt;<\/code>\uff09\u6765\u6307\u793a\u8c01\u5728\u8bf4\u8bdd\u3002<\/p>\n\n\n\n<p>\u5982\u679c\u6211\u4eec\u5c06\u6b64\u6a21\u677f\u5e94\u7528\u4e8e\u524d\u9762\u7684\u6307\u4ee4\u793a\u4f8b\uff0c\u6211\u4eec\u5c06\u5f97\u5230\u4ee5\u4e0b\u7ed3\u679c\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">&lt;|im_start|&gt;system\nYou are a helpful assistant, who always provide explanation. Think like you are answering to a five year old.&lt;|im_end|&gt;\n&lt;|im_start|&gt;user\nRemove the spaces from the following sentence: It prevents users to suspect that there are some hidden products installed on theirs device.\n&lt;|im_end|&gt;\n&lt;|im_start|&gt;assistant\nItpreventsuserstosuspectthattherearesomehiddenproductsinstalledontheirsdevice.&lt;|im_end|&gt;\n<\/pre><\/div>\n\n\n\n<p>\u5728\u4e0b\u9762\u7684\u4ee3\u7801\u5757\u4e2d\uff0c\u6211\u4eec\u4f7f\u7528&nbsp;<code>mapping<\/code>&nbsp;\u53c2\u6570\u89e3\u6790 ShareGPT \u6570\u636e\u96c6\uff0c\u5e76\u5305\u542b ChatML \u6a21\u677f\u3002\u7136\u540e\uff0c\u6211\u4eec\u52a0\u8f7d\u5e76\u5904\u7406\u6574\u4e2a\u6570\u636e\u96c6\uff0c\u4ee5\u5c06\u804a\u5929\u6a21\u677f\u5e94\u7528\u4e8e\u6bcf\u4e2a\u5bf9\u8bdd\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">tokenizer = get_chat_template(\n    tokenizer,\n    mapping={\"role\": \"from\", \"content\": \"value\", \"user\": \"human\", \"assistant\": \"gpt\"},\n    chat_template=\"chatml\",\n)\n\ndef apply_template(examples):\n    messages = examples[\"conversations\"]\n    text = [tokenizer.apply_chat_template(message, tokenize=False, add_generation_prompt=False) for message in messages]\n    return {\"text\": text}\n\ndataset = 
load_dataset(\"mlabonne\/FineTome-100k\", split=\"train\")\ndataset = dataset.map(apply_template, batched=True)\n<\/pre><\/div>\n\n\n\n<p>\u73b0\u5728\uff0c\u6211\u4eec\u5df2\u51c6\u5907\u597d\u4e3a\u8fd0\u884c\u6307\u5b9a\u8bad\u7ec3\u53c2\u6570\u3002\u6211\u60f3\u7b80\u8981\u4ecb\u7ecd\u4e00\u4e0b\u6700\u91cd\u8981\u7684\u8d85\u53c2\u6570\uff1a<\/p>\n\n\n\n<ul class=\"wp-block-list\">\n<li><strong>\u5b66\u4e60\u7387<\/strong>(<strong>Learning rate<\/strong>)\uff1a\u5b83\u63a7\u5236\u6a21\u578b\u66f4\u65b0\u5176\u53c2\u6570\u7684\u5f3a\u5ea6\u3002\u592a\u4f4e\uff0c\u8bad\u7ec3\u4f1a\u5f88\u6162\uff0c\u5e76\u4e14\u53ef\u80fd\u4f1a\u5361\u5728\u5c40\u90e8\u6700\u5c0f\u503c\u3002\u592a\u9ad8\uff0c\u8bad\u7ec3\u53ef\u80fd\u4f1a\u53d8\u5f97\u4e0d\u7a33\u5b9a\u6216\u53d1\u6563\uff0c\u4ece\u800c\u964d\u4f4e\u8868\u73b0\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>LR \u8c03\u5ea6\u5668<\/strong>(<strong>LR scheduler<\/strong>)\uff1a\u5b83\u5728\u8bad\u7ec3\u671f\u95f4\u8c03\u6574\u5b66\u4e60\u7387 \uff08LR\uff09\uff0c\u4ece\u8f83\u9ad8\u7684 LR \u5f00\u59cb\u4ee5\u5b9e\u73b0\u5feb\u901f\u7684\u521d\u59cb\u8fdb\u5ea6\uff0c\u7136\u540e\u5728\u540e\u671f\u9636\u6bb5\u964d\u4f4e\u5b83\u3002\u7ebf\u6027\u8c03\u5ea6\u5668\u548c\u4f59\u5f26\u8c03\u5ea6\u5668\u662f\u4e24\u79cd\u6700\u5e38\u89c1\u7684\u9009\u9879\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>\u6279\u91cf\u5927\u5c0f<\/strong>(<strong>Batch 
size<\/strong>)\uff1a\u5728\u6743\u91cd\u66f4\u65b0\u4e4b\u524d\u5904\u7406\u7684\u6837\u672c\u6570\u91cf\u3002\u8f83\u5927\u7684\u6279\u91cf\u5927\u5c0f\u901a\u5e38\u4f1a\u5bfc\u81f4\u66f4\u7a33\u5b9a\u7684\u68af\u5ea6\u4f30\u8ba1\uff0c\u5e76\u53ef\u4ee5\u63d0\u9ad8\u8bad\u7ec3\u901f\u5ea6\uff0c\u4f46\u5b83\u4eec\u4e5f\u9700\u8981\u66f4\u591a\u7684\u5185\u5b58\u3002\u68af\u5ea6\u7d2f\u79ef\u901a\u8fc7\u5728\u66f4\u65b0\u6a21\u578b\u4e4b\u524d\u5728\u591a\u4e2a\u524d\/\u540e\u4f20\u9012\u4e0a\u7d2f\u79ef\u68af\u5ea6\uff0c\u4ece\u800c\u6709\u6548\u5730\u5b9e\u73b0\u66f4\u5927\u7684\u6279\u91cf\u5927\u5c0f\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>Num epochs<\/strong>\uff1a\u901a\u8fc7\u8bad\u7ec3\u6570\u636e\u96c6\u7684\u5b8c\u6210\u4f20\u9012\u6b21\u6570\u3002\u66f4\u591a\u7684\u65f6\u671f\u4f7f\u6a21\u578b\u80fd\u591f\u66f4\u591a\u5730\u67e5\u770b\u6570\u636e\uff0c\u4ece\u800c\u53ef\u80fd\u5e26\u6765\u66f4\u597d\u7684\u6027\u80fd\u3002\u4f46\u662f\uff0c\u8fc7\u591a\u7684 epoch \u4f1a\u5bfc\u81f4\u8fc7\u62df\u5408\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>\u4f18\u5316\u5668<\/strong>(<strong>Optimizer<\/strong>)\uff1a\u7528\u4e8e\u8c03\u6574\u6a21\u578b\u53c2\u6570\u4ee5\u6700\u5c0f\u5316\u635f\u5931\u51fd\u6570\u7684\u7b97\u6cd5\u3002\u5728\u5b9e\u8df5\u4e2d\uff0c\u5f3a\u70c8\u5efa\u8bae\u4f7f\u7528 AdamW 8 \u4f4d\uff1a\u5b83\u7684\u6027\u80fd\u4e0e 32 \u4f4d\u7248\u672c\u4e00\u6837\u597d\uff0c\u540c\u65f6\u4f7f\u7528\u8f83\u5c11\u7684 GPU \u5185\u5b58\u3002AdamW \u7684\u5206\u9875\u7248\u672c\u4ec5\u5728\u5206\u5e03\u5f0f\u8bbe\u7f6e\u4e2d\u624d\u6709\u610f\u4e49\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>\u6743\u91cd\u8870\u51cf<\/strong>(<strong>Weight 
decay<\/strong>)\uff1a\u4e00\u79cd\u6b63\u5219\u5316\u6280\u672f\uff0c\u5c06\u5927\u6743\u91cd\u7684\u60e9\u7f5a\u6dfb\u52a0\u5230\u635f\u5931\u51fd\u6570\u4e2d\u3002\u5b83\u901a\u8fc7\u9f13\u52b1\u6a21\u578b\u5b66\u4e60\u66f4\u7b80\u5355\u3001\u66f4\u53ef\u63a8\u5e7f\u7684\u7279\u5f81\u6765\u5e2e\u52a9\u9632\u6b62\u8fc7\u5ea6\u62df\u5408\u3002\u7136\u800c\uff0c\u8fc7\u591a\u7684\u6743\u91cd\u8870\u51cf\u4f1a\u963b\u788d\u5b66\u4e60\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>\u70ed\u8eab\u6b65\u9aa4<\/strong>(<strong>Warmup steps<\/strong>)\uff1a\u8bad\u7ec3\u5f00\u59cb\u65f6\u7684\u4e00\u6bb5\u65f6\u95f4\uff0c\u5b66\u4e60\u7387\u4ece\u5c0f\u503c\u9010\u6e10\u589e\u52a0\u5230\u521d\u59cb\u5b66\u4e60\u7387\u3002\u9884\u70ed\u53ef\u4ee5\u5e2e\u52a9\u7a33\u5b9a\u65e9\u671f\u8bad\u7ec3\uff0c\u5c24\u5176\u662f\u5728\u5b66\u4e60\u7387\u5927\u6216\u6279\u91cf\u5927\u7684\u60c5\u51b5\u4e0b\uff0c\u5141\u8bb8\u6a21\u578b\u5728\u8fdb\u884c\u5927\u89c4\u6a21\u66f4\u65b0\u4e4b\u524d\u9002\u5e94\u6570\u636e\u5206\u5e03\u3002<\/li>\n\n\n\n<li><\/li>\n\n\n\n<li><strong>\u5305\u88c5<\/strong>(<strong>Packing<\/strong>)\uff1a\u6279\u6b21\u5177\u6709\u9884\u5b9a\u4e49\u7684\u5e8f\u5217\u957f\u5ea6\u3002\u6211\u4eec\u53ef\u4ee5\u5c06\u591a\u4e2a\u5c0f\u6837\u54c1\u5408\u5e76\u5230\u4e00\u4e2a\u6279\u6b21\u4e2d\uff0c\u800c\u4e0d\u662f\u4e3a\u6bcf\u4e2a\u6837\u54c1\u5206\u914d\u4e00\u4e2a\u6279\u6b21\uff0c\u4ece\u800c\u63d0\u9ad8\u4e86\u6548\u7387\u3002<\/li>\n<\/ul>\n\n\n\n<p>\u6211\u5728 Google Colab \u4e0a\u4f7f\u7528 A100 GPU\uff0840 GB VRAM\uff09\u5728\u6574\u4e2a\u6570\u636e\u96c6\uff08100k \u4e2a\u6837\u672c\uff09\u4e0a\u8bad\u7ec3\u4e86\u6a21\u578b\u3002\u57f9\u8bad\u5386\u65f64\u5c0f\u65f645\u5206\u949f\u3002\u5f53\u7136\uff0c\u60a8\u53ef\u4ee5\u4f7f\u7528\u5177\u6709\u8f83\u5c11 VRAM \u548c\u8f83\u5c0f\u6279\u91cf\u5927\u5c0f\u7684\u8f83\u5c0f GPU\uff0c\u4f46\u5b83\u4eec\u7684\u901f\u5ea6\u5e76\u4e0d\u5feb\u3002\u4f8b\u5982\uff0cL4 \u5927\u7ea6\u9700\u8981 19 
\u5c0f\u65f6 40 \u5206\u949f\uff0c\u800c\u7a7a\u95f2 T4 \u5219\u9700\u8981\u9ad8\u8fbe 47 \u5c0f\u65f6\u3002<\/p>\n\n\n\n<p>\u5728\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u6211\u5efa\u8bae\u53ea\u52a0\u8f7d\u6570\u636e\u96c6\u7684\u4e00\u4e2a\u5b50\u96c6\u4ee5\u52a0\u5feb\u8bad\u7ec3\u901f\u5ea6\u3002\u60a8\u53ef\u4ee5\u901a\u8fc7\u4fee\u6539\u524d\u9762\u7684\u4ee3\u7801\u5757\u6765\u5b9e\u73b0\u6b64\u64cd\u4f5c\uff0c\u4f8b\u5982&nbsp;<code>dataset = load_dataset\uff08\u201cmlabonne\/FineTome-100k\u201d\uff0c split=\u201ctrain[\uff1a10000]\u201d\uff09<\/code>&nbsp;\u4ee5\u4ec5\u52a0\u8f7d 10k \u4e2a\u6837\u672c\u3002\u6216\u8005\uff0c\u60a8\u53ef\u4ee5\u4f7f\u7528\u66f4\u4fbf\u5b9c\u7684\u4e91 GPU \u63d0\u4f9b\u5546\uff0c\u4f8b\u5982 Paperspace\u3001RunPod \u6216 Lambda Labs\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">trainer=SFTTrainer(\n    model=model,\n    tokenizer=tokenizer,\n    train_dataset=dataset,\n    dataset_text_field=\"text\",\n    max_seq_length=max_seq_length,\n    dataset_num_proc=2,\n    packing=True,\n    args=TrainingArguments(\n        learning_rate=3e-4,\n        lr_scheduler_type=\"linear\",\n        per_device_train_batch_size=8,\n        gradient_accumulation_steps=2,\n        num_train_epochs=1,\n        fp16=not is_bfloat16_supported(),\n        bf16=is_bfloat16_supported(),\n        logging_steps=1,\n        optim=\"adamw_8bit\",\n        weight_decay=0.01,\n        warmup_steps=10,\n        output_dir=\"output\",\n        seed=0,\n    ),\n)\n\ntrainer.train()\n<\/pre><\/div>\n\n\n\n<p>\u4e0b\u9762\u662f\u4ee3\u7801\u4e2d <code>SFTTrainer<\/code> \u548c <code>TrainingArguments<\/code> \u7684\u53c2\u6570\u8be6\u7ec6\u89e3\u91ca\uff1a<\/p>\n\n\n\n<h5 class=\"wp-block-heading\"><code>SFTTrainer<\/code> \u53c2\u6570\uff1a<\/h5>\n\n\n\n<ol class=\"wp-block-list\">\n<li><strong><code>model=model<\/code><\/strong>: 
\u6307\u5b9a\u8981\u8bad\u7ec3\u7684\u6a21\u578b\uff0c\u8fd9\u901a\u5e38\u662f\u4e00\u4e2a\u9884\u8bad\u7ec3\u7684\u8bed\u8a00\u6a21\u578b\uff08\u5982 GPT\u3001BERT \u7b49\uff09\u3002<\/li>\n\n\n\n<li><strong><code>tokenizer=tokenizer<\/code><\/strong>: \u6307\u5b9a\u4e0e\u6a21\u578b\u914d\u5957\u7684\u5206\u8bcd\u5668\uff0c\u786e\u4fdd\u6587\u672c\u88ab\u6b63\u786e\u5730\u7f16\u7801\u548c\u89e3\u7801\u3002<\/li>\n\n\n\n<li><strong><code>train_dataset=dataset<\/code><\/strong>: \u6307\u5b9a\u8bad\u7ec3\u4f7f\u7528\u7684\u6570\u636e\u96c6\u3002<\/li>\n\n\n\n<li><strong><code>dataset_text_field=\"text\"<\/code><\/strong>: \u6307\u5b9a\u6570\u636e\u96c6\u4e2d\u5305\u542b\u6587\u672c\u7684\u5b57\u6bb5\u540d\u3002\u5728\u6570\u636e\u96c6\u4e2d\uff0c\u8fd9\u4e2a\u5b57\u6bb5\u5305\u542b\u4e86\u8981\u7528\u6765\u8bad\u7ec3\u6a21\u578b\u7684\u6587\u672c\u5185\u5bb9\u3002<\/li>\n\n\n\n<li><strong><code>max_seq_length=max_seq_length<\/code><\/strong>: \u6307\u5b9a\u8f93\u5165\u5e8f\u5217\u7684\u6700\u5927\u957f\u5ea6\u3002\u5e8f\u5217\u8d85\u8fc7\u6b64\u957f\u5ea6\u5c06\u88ab\u622a\u65ad\uff0c\u77ed\u4e8e\u6b64\u957f\u5ea6\u7684\u5c06\u88ab\u586b\u5145\u3002<\/li>\n\n\n\n<li><strong><code>dataset_num_proc=2<\/code><\/strong>: \u6307\u5b9a\u5728\u6570\u636e\u5904\u7406\u65f6\u4f7f\u7528\u7684\u5e76\u884c\u8fdb\u7a0b\u6570\u3002\u5728\u6570\u636e\u9884\u5904\u7406\u548c\u52a0\u8f7d\u65f6\u4f7f\u7528\u591a\u4e2a\u8fdb\u7a0b\u4ee5\u52a0\u5feb\u901f\u5ea6\u3002<\/li>\n\n\n\n<li><strong><code>packing=True<\/code><\/strong>: \u5f00\u542f\u6570\u636e\u6253\u5305\u6a21\u5f0f\uff0c\u8fd9\u6709\u52a9\u4e8e\u66f4\u6709\u6548\u5730\u5229\u7528\u6279\u6b21\u4e2d\u7684\u7a7a\u95f4\uff0c\u5c06\u591a\u4e2a\u77ed\u53e5\u5b50\u6253\u5305\u5728\u540c\u4e00\u4e2a\u5e8f\u5217\u4e2d\uff0c\u63d0\u9ad8\u8bad\u7ec3\u6548\u7387\u3002<\/li>\n<\/ol>\n\n\n\n<h5 class=\"wp-block-heading\"><code>TrainingArguments<\/code> \u53c2\u6570\uff1a<\/h5>\n\n\n\n<ol 
class=\"wp-block-list\">\n<li><strong><code>learning_rate=3e-4<\/code><\/strong>: \u8bbe\u7f6e\u5b66\u4e60\u7387\uff0c\u63a7\u5236\u6a21\u578b\u5728\u6bcf\u4e00\u6b65\u8bad\u7ec3\u65f6\u6743\u91cd\u7684\u66f4\u65b0\u901f\u5ea6\u30023e-4 \u662f\u4e00\u4e2a\u5e38\u7528\u7684\u521d\u59cb\u503c\uff0c\u53ef\u4ee5\u6839\u636e\u5177\u4f53\u9700\u6c42\u8c03\u6574\u3002<\/li>\n\n\n\n<li><strong><code>lr_scheduler_type=\"linear\"<\/code><\/strong>: \u6307\u5b9a\u5b66\u4e60\u7387\u8c03\u5ea6\u5668\u7c7b\u578b\u3002<code>linear<\/code> \u8868\u793a\u5b66\u4e60\u7387\u7ebf\u6027\u4e0b\u964d\u3002<\/li>\n\n\n\n<li><strong><code>per_device_train_batch_size=4<\/code><\/strong>: \u6bcf\u4e2a\u8bbe\u5907\uff08\u5982 GPU\uff09\u4e0a\u8bad\u7ec3\u65f6\u7684\u6279\u6b21\u5927\u5c0f\u3002\u8fd9\u91cc\u6bcf\u6b21\u5904\u7406 4 \u4e2a\u6837\u672c\u3002<\/li>\n\n\n\n<li><strong><code>gradient_accumulation_steps=4<\/code><\/strong>: \u7d2f\u79ef\u68af\u5ea6\u7684\u6b65\u9aa4\u6570\u3002\u5373\u6bcf 4 \u4e2a\u6279\u6b21\u8ba1\u7b97\u4e00\u6b21\u68af\u5ea6\u66f4\u65b0\uff0c\u8fd9\u6837\u76f8\u5f53\u4e8e\u5c06\u6279\u6b21\u5927\u5c0f\u6269\u5c55\u5230 4\u00d74=16\u3002<\/li>\n\n\n\n<li><strong><code>num_train_epochs=1<\/code><\/strong>: \u8bad\u7ec3\u7684\u8f6e\u6b21\uff0c\u5373\u904d\u5386\u8bad\u7ec3\u6570\u636e\u96c6\u7684\u6b21\u6570\u3002\u8fd9\u91cc\u8bbe\u7f6e\u4e3a 1\u3002<\/li>\n\n\n\n<li><strong><code>fp16=not is_bfloat16_supported()<\/code><\/strong>: \u662f\u5426\u4f7f\u7528\u6df7\u5408\u7cbe\u5ea6\u8bad\u7ec3\uff08FP16\uff09\u3002\u5982\u679c\u7cfb\u7edf\u4e0d\u652f\u6301 BFloat16\uff08\u8f83\u65b0\u786c\u4ef6\u652f\u6301\uff09\uff0c\u5219\u4f7f\u7528 FP16 \u8bad\u7ec3\u6765\u52a0\u901f\u3002<\/li>\n\n\n\n<li><strong><code>bf16=is_bfloat16_supported()<\/code><\/strong>: \u5982\u679c\u786c\u4ef6\u652f\u6301 BFloat16\uff08\u5982\u8f83\u65b0\u7684 NVIDIA GPU\uff09\uff0c\u5219\u4f7f\u7528 BFloat16 
\u8fdb\u884c\u8bad\u7ec3\u3002<\/li>\n\n\n\n<li><strong><code>logging_steps=1<\/code><\/strong>: \u6bcf\u8bad\u7ec3 1 \u4e2a\u6b65\u9aa4\u8bb0\u5f55\u4e00\u6b21\u65e5\u5fd7\uff0c\u8fd9\u53ef\u4ee5\u5e2e\u52a9\u4f60\u8ddf\u8e2a\u8bad\u7ec3\u8fdb\u5ea6\u3002<\/li>\n\n\n\n<li><strong><code>optim=\"adamw_8bit\"<\/code><\/strong>: \u4f7f\u7528 8-bit \u7cbe\u5ea6\u7684 <code>AdamW<\/code> \u4f18\u5316\u5668\uff0c\u53ef\u4ee5\u51cf\u5c11\u663e\u5b58\u4f7f\u7528\u5e76\u63d0\u5347\u8bad\u7ec3\u901f\u5ea6\u3002<\/li>\n\n\n\n<li><strong><code>weight_decay=0.01<\/code><\/strong>: \u6743\u91cd\u8870\u51cf\uff08L2 \u6b63\u5219\u5316\uff09\u7cfb\u6570\uff0c\u6709\u52a9\u4e8e\u9632\u6b62\u6a21\u578b\u8fc7\u62df\u5408\u3002<\/li>\n\n\n\n<li><strong><code>warmup_steps=10<\/code><\/strong>: \u5b66\u4e60\u7387\u9884\u70ed\u7684\u6b65\u9aa4\u6570\uff0c\u5728\u8bad\u7ec3\u5f00\u59cb\u65f6\u9010\u6e10\u589e\u52a0\u5b66\u4e60\u7387\uff0c\u907f\u514d\u8bad\u7ec3\u521d\u671f\u4e0d\u7a33\u5b9a\u3002<\/li>\n\n\n\n<li><strong><code>output_dir=\"output\"<\/code><\/strong>: \u6a21\u578b\u8f93\u51fa\u4fdd\u5b58\u7684\u8def\u5f84\u3002\u8bad\u7ec3\u540e\u7684\u6a21\u578b\u548c\u65e5\u5fd7\u5c06\u4fdd\u5b58\u5230\u8be5\u76ee\u5f55\u3002<\/li>\n\n\n\n<li><strong><code>seed=0<\/code><\/strong>: \u968f\u673a\u79cd\u5b50\uff0c\u7528\u4e8e\u786e\u4fdd\u8bad\u7ec3\u7684\u53ef\u91cd\u590d\u6027\u3002\u4e0d\u540c\u7684\u79cd\u5b50\u53ef\u80fd\u4f1a\u5bfc\u81f4\u4e0d\u540c\u7684\u8bad\u7ec3\u7ed3\u679c\u3002<\/li>\n<\/ol>\n\n\n\n<p>\u73b0\u5728\u6a21\u578b\u5df2\u7ecf\u8bad\u7ec3\u597d\u4e86\uff0c\u8ba9\u6211\u4eec\u7528\u4e00\u4e2a\u7b80\u5355\u7684\u63d0\u793a\u6765\u6d4b\u8bd5\u5b83\u3002\u8fd9\u4e0d\u662f\u4e00\u4e2a\u4e25\u683c\u7684\u8bc4\u4f30\uff0c\u800c\u53ea\u662f\u4e00\u4e2a\u5feb\u901f\u68c0\u67e5\uff0c\u4ee5\u53d1\u73b0\u6f5c\u5728\u7684\u95ee\u9898\u3002\u6211\u4eec\u4f7f\u7528&nbsp;<code>FastLanguageModel.for_inference()<\/code>&nbsp;\u6765\u83b7\u5f97 2 
\u500d\u7684\u63a8\u7406\u901f\u5ea6\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">model = FastLanguageModel.for_inference(model)\n\nmessages = [\n    {\"from\": \"human\", \"value\": \"Is 9.11 larger than 9.9?\"},\n]\ninputs = tokenizer.apply_chat_template(\n    messages,\n    tokenize=True,\n    add_generation_prompt=True,\n    return_tensors=\"pt\",\n).to(\"cuda\")\n\ntext_streamer = TextStreamer(tokenizer)\n_ = model.generate(input_ids=inputs, streamer=text_streamer, max_new_tokens=128, use_cache=True)\n<\/pre><\/div>\n\n\n\n<p>\u6a21\u578b\u7684\u54cd\u5e94\u662f\u201c9.9\u201d\uff0c\u8fd9\u662f\u6b63\u786e\u7684\uff01<\/p>\n\n\n\n<p>\u73b0\u5728\u8ba9\u6211\u4eec\u4fdd\u5b58\u6211\u4eec\u8bad\u7ec3\u597d\u7684\u6a21\u578b\u3002\u5982\u679c\u60a8\u8fd8\u8bb0\u5f97\u6709\u5173 LoRA \u548c QLoRA \u7684\u90e8\u5206\uff0c\u6211\u4eec\u8bad\u7ec3\u7684\u4e0d\u662f\u6a21\u578b\u672c\u8eab\uff0c\u800c\u662f\u4e00\u7ec4\u9002\u914d\u5668\u3002Unsloth \u4e2d\u6709\u4e09\u79cd\u4fdd\u5b58\u65b9\u6cd5\uff1a&nbsp;<code>lora<\/code>&nbsp;\u4ec5\u4fdd\u5b58\u9002\u914d\u5668\uff0c \u548c&nbsp;<code>merged_16bit<\/code>\/<code>merged_4bit<\/code>&nbsp;\u4ee5 16 \u4f4d\/4 \u4f4d\u7cbe\u5ea6\u5c06\u9002\u914d\u5668\u4e0e\u6a21\u578b\u5408\u5e76.<\/p>\n\n\n\n<p>\u5728\u4e0b\u6587\u4e2d\uff0c\u6211\u4eec\u5c06\u4ee5 16 \u4f4d\u7cbe\u5ea6\u5c06\u5b83\u4eec\u5408\u5e76\uff0c\u4ee5\u6700\u5927\u9650\u5ea6\u5730\u63d0\u9ad8\u8d28\u91cf\u3002\u6211\u4eec\u9996\u5148\u5c06\u5176\u4fdd\u5b58\u5728\u672c\u5730\u7684\u201cmodel\u201d\u76ee\u5f55\u4e2d\uff0c\u7136\u540e\u5c06\u5176\u4e0a\u4f20\u5230Hugging Face Hub\u3002\u60a8\u53ef\u4ee5\u5728&nbsp;<a href=\"https:\/\/huggingface.co\/mlabonne\/FineLlama-3.1-8B\">mlabonne\/FineLlama-3.1-8B<\/a>&nbsp;\u4e0a\u627e\u5230\u7ecf\u8fc7\u8bad\u7ec3\u7684\u6a21\u578b\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python 
decode:true \">model.save_pretrained_merged(\"model\", tokenizer, save_method=\"merged_16bit\")\nmodel.push_to_hub_merged(\"mlabonne\/FineLlama-3.1-8B\", tokenizer, save_method=\"merged_16bit\")\n<\/pre><\/div>\n\n\n\n<p>Unsloth \u8fd8\u5141\u8bb8\u60a8\u76f4\u63a5\u5c06\u6a21\u578b\u8f6c\u6362\u4e3a GGUF \u683c\u5f0f\u3002\u8fd9\u662f\u4e3a llama.cpp \u521b\u5efa\u7684\u91cf\u5316\u683c\u5f0f\uff0c\u4e0e\u5927\u591a\u6570\u63a8\u7406\u5f15\u64ce\u517c\u5bb9\uff0c\u4f8b\u5982&nbsp;<a href=\"https:\/\/lmstudio.ai\/\">LM Studio<\/a>\u3001<a href=\"https:\/\/ollama.com\/\">Ollama<\/a>&nbsp;\u548c oobabooga \u7684&nbsp;<a href=\"https:\/\/github.com\/oobabooga\/text-generation-webui\">text-generation-webui<\/a>\u3002\u7531\u4e8e\u60a8\u53ef\u4ee5\u6307\u5b9a\u4e0d\u540c\u7684\u7cbe\u5ea6\uff08\u8bf7\u53c2\u9605<a href=\"https:\/\/mlabonne.github.io\/blog\/posts\/Quantize_Llama_2_models_using_ggml.html\">\u6211\u5173\u4e8e GGUF \u548c llama.cpp \u7684\u6587\u7ae0<\/a>\uff09\uff0c\u6211\u4eec\u5c06\u904d\u5386\u4e00\u4e2a\u5217\u8868\u4ee5<code>q2_k<\/code>\u3001<code>q3_k_m<\/code>\u3001<code>q4_k_m<\/code>\u3001<code>q5_k_m<\/code>\u3001<code>q6_k\u3001q8_0<\/code>\u5bf9\u5176\u8fdb\u884c\u91cf\u5316\uff0c\u5e76\u5c06\u8fd9\u4e9b\u91cf\u5316\u4e0a\u4f20\u5230 Hugging Face\u3002<a href=\"https:\/\/huggingface.co\/mlabonne\/FineLlama-3.1-8B-GGUF\">mlabonne\/FineLlama-3.1-8B-GGUF<\/a>&nbsp;\u5305\u542b\u6211\u4eec\u6240\u6709\u7684 GGUF\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">quant_methods = [\"q2_k\", \"q3_k_m\", \"q4_k_m\", \"q5_k_m\", \"q6_k\", \"q8_0\"]\nfor quant in quant_methods:\n    model.push_to_hub_gguf(\"mlabonne\/FineLlama-3.1-8B-GGUF\", tokenizer, 
quant)\n<\/pre><\/div>\n\n\n\n<p>\u606d\u559c\uff0c\u6211\u4eec\u4ece\u5934\u5f00\u59cb\u5fae\u8c03\u4e86\u4e00\u4e2a\u6a21\u578b\uff0c\u5e76\u4e0a\u4f20\u4e86\u91cf\u5316\u6570\u636e\uff0c\u60a8\u73b0\u5728\u53ef\u4ee5\u5728\u60a8\u6700\u559c\u6b22\u7684\u63a8\u7406\u5f15\u64ce\u4e2d\u4f7f\u7528\u3002\u968f\u610f\u5c1d\u8bd5&nbsp;<a href=\"https:\/\/huggingface.co\/mlabonne\/FineLlama-3.1-8B-GGUF\">mlabonne\/FineLlama-3.1-8B-GGUF<\/a>&nbsp;\u4e0a\u53ef\u7528\u7684\u6700\u7ec8\u6a21\u578b\u3002\u73b0\u5728\u8be5\u600e\u4e48\u529e\uff1f\u4ee5\u4e0b\u662f\u6709\u5173\u5982\u4f55\u4f7f\u7528\u6a21\u578b\u7684\u4e00\u4e9b\u60f3\u6cd5\uff1a<\/p>\n\n\n\n<ul class=\"wp-block-list\">\n<li>\u5728&nbsp;<a href=\"https:\/\/huggingface.co\/spaces\/open-llm-leaderboard\/open_llm_leaderboard\">Open LLM \u6392\u884c\u699c<\/a>\u4e0a<strong>\u8fdb\u884c\u8bc4\u4f30<\/strong>\uff08\u60a8\u53ef\u4ee5\u514d\u8d39\u63d0\u4ea4\uff09\u6216\u4f7f\u7528\u5176\u4ed6\u8bc4\u4f30\uff0c\u4f8b\u5982\u5728&nbsp;<a href=\"https:\/\/github.com\/mlabonne\/llm-autoeval\">LLM AutoEval<\/a>&nbsp;\u4e2d\u3002<\/li>\n\n\n\n<li>\u4f7f\u7528\u504f\u597d\u6570\u636e\u96c6\uff08\u5982&nbsp;<a href=\"https:\/\/huggingface.co\/datasets\/mlabonne\/orpo-dpo-mix-40k\">mlabonne\/orpo-dpo-mix-40k<\/a>\uff09\u5c06\u5176\u4e0e\u76f4\u63a5\u504f\u597d\u4f18\u5316<strong>\u5bf9\u9f50<\/strong>\uff0c\u4ee5\u63d0\u9ad8\u6027\u80fd\u3002<\/li>\n\n\n\n<li>\u4f7f\u7528&nbsp;<a href=\"https:\/\/colab.research.google.com\/drive\/1b6nqC7UZVt8bx4MksX7s656GXPM-eWw4?usp=sharing\">AutoQuant<\/a>&nbsp;\u4ee5\u5176\u4ed6\u683c\u5f0f\uff08\u5982 EXL2\u3001AWQ\u3001GPTQ \u6216 HQQ\uff09\u5bf9\u5176\u8fdb\u884c<strong>\u91cf\u5316<\/strong>\uff0c\u4ee5\u5b9e\u73b0\u66f4\u5feb\u7684\u63a8\u7406\u6216\u66f4\u4f4e\u7684\u7cbe\u5ea6\u3002<\/li>\n\n\n\n<li>\u4f7f\u7528&nbsp;<a 
href=\"https:\/\/colab.research.google.com\/drive\/1LcVUW5wsJTO2NGmozjji5CkC--646LgC\">ZeroChat<\/a>&nbsp;\u5c06\u5176<strong>\u90e8\u7f72\u5230<\/strong>&nbsp;Hugging Face Space \u4e0a\uff0c\u7528\u4e8e\u7ecf\u8fc7\u5145\u5206\u8bad\u7ec3\u4ee5\u9075\u5faa\u804a\u5929\u6a21\u677f\u7684\u6a21\u578b\uff08~20k \u6837\u672c\uff09\u3002<\/li>\n<\/ul>\n\n\n\n<h2 class=\"wp-block-heading\"><a href=\"https:\/\/huggingface.co\/blog\/mlabonne\/sft-llama3#conclusion\"><\/a>\u7ed3\u8bba<\/h2>\n\n\n\n<p>\u672c\u6587\u5168\u9762\u6982\u8ff0\u4e86\u76d1\u7763\u5fae\u8c03\u4ee5\u53ca\u5982\u4f55\u5728\u5b9e\u8df5\u4e2d\u5c06\u5176\u5e94\u7528\u4e8e Llama 3.1 8B \u6a21\u578b\u3002\u901a\u8fc7\u5229\u7528 QLoRA \u7684\u9ad8\u6548\u5185\u5b58\u4f7f\u7528\uff0c\u6211\u4eec\u8bbe\u6cd5\u5728 GPU \u8d44\u6e90\u6709\u9650\u7684\u8d85\u9ad8\u8d28\u91cf\u6570\u636e\u96c6\u4e0a\u5fae\u8c03 8B LLM\u3002\u6211\u4eec\u8fd8\u4e3a\u66f4\u5927\u89c4\u6a21\u7684\u8fd0\u884c\u63d0\u4f9b\u4e86\u66f4\u6709\u6548\u7684\u66ff\u4ee3\u65b9\u6848\uff0c\u5e76\u4e3a\u8fdb\u4e00\u6b65\u6b65\u9aa4\u63d0\u4f9b\u4e86\u5efa\u8bae\uff0c\u5305\u62ec\u8bc4\u4f30\u3001\u504f\u597d\u5bf9\u9f50\u3001\u91cf\u5316\u548c\u90e8\u7f72\u3002<\/p>\n\n\n\n<p>\u6211\u5e0c\u671b\u672c\u6307\u5357\u6709\u7528\u3002\u5982\u679c\u4f60\u6709\u5174\u8da3\u4e86\u89e3\u66f4\u591a\u5173\u4e8eLLMs\u7684\u4fe1\u606f\uff0c\u6211\u5efa\u8bae\u4f60\u67e5\u770b<a href=\"https:\/\/github.com\/mlabonne\/llm-course\">LLM\u8bfe\u7a0b<\/a>\u3002\u5982\u679c\u60a8\u559c\u6b22\u8fd9\u7bc7\u6587\u7ae0\uff0c\u8bf7\u5728 X&nbsp;<a href=\"https:\/\/x.com\/maximelabonne\">@maximelabonne<\/a>&nbsp;\u548c Hugging Face&nbsp;<a href=\"https:\/\/huggingface.co\/mlabonne\">@mlabonne<\/a>&nbsp;\u4e0a\u5173\u6ce8\u6211\u3002\u795d\u4f60\u597d\u8fd0\uff0c\u5fae\u8c03\u6a21\u578b\uff01<\/p>\n\n\n\n<p>\u539f\u6587\u94fe\u63a5\uff1a<a href=\"https:\/\/huggingface.co\/blog\/mlabonne\/sft-llama3\">\u4f7f\u7528 Unsloth \u8d85\u9ad8\u6548\u5fae\u8c03 Llama 3.1 
(huggingface.co)<\/a><\/p>\n\n\n\n<h2 class=\"wp-block-heading\">\u5b8c\u6574\u7684\u4ee3\u7801<\/h2>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"left-set:true lang:python decode:true \">#!pip install -qqq \"unsloth[colab-new] @ git+https:\/\/github.com\/unslothai\/unsloth.git\" --progress-bar off\n#!pip install -qqq --no-deps \"xformers&lt;0.0.27\" \"trl&lt;0.9.0\" peft accelerate bitsandbytes --progress-bar off\n\nimport torch\nfrom trl import SFTTrainer\nfrom datasets import load_dataset\nfrom transformers import TrainingArguments, TextStreamer\nfrom unsloth.chat_templates import get_chat_template\nfrom unsloth import FastLanguageModel, is_bfloat16_supported\n\n# 1. Load model for PEFT\n# Load model\nmax_seq_length = 2048\nmodel, tokenizer = FastLanguageModel.from_pretrained(\n    model_name=\"unsloth\/Meta-Llama-3.1-8B-bnb-4bit\",\n    max_seq_length=max_seq_length,\n    load_in_4bit=True,\n    dtype=None,\n)\n\n# Prepare model for PEFT\nmodel = FastLanguageModel.get_peft_model(\n    model,\n    r=16,\n    lora_alpha=16,\n    lora_dropout=0,\n    target_modules=[\"q_proj\", \"k_proj\", \"v_proj\", \"up_proj\", \"down_proj\", \"o_proj\", \"gate_proj\"],\n    use_rslora=True,\n    use_gradient_checkpointing=\"unsloth\"\n)\nprint(model.print_trainable_parameters())\n\n# 2. Prepare data and tokenizer\ntokenizer = get_chat_template(\n    tokenizer,\n    chat_template=\"chatml\",\n    mapping={\"role\" : \"from\", \"content\" : \"value\", \"user\" : \"human\", \"assistant\" : \"gpt\"}\n)\n\ndef apply_template(examples):\n    messages = examples[\"conversations\"]\n    text = [tokenizer.apply_chat_template(message, tokenize=False, add_generation_prompt=False) for message in messages]\n    return {\"text\": text}\n\ndataset = load_dataset(\"mlabonne\/FineTome-100k\", split=\"train\")\ndataset = dataset.map(apply_template, batched=True)\n\n# 3. 
Training\n\ntrainer=SFTTrainer(\n    model=model,\n    tokenizer=tokenizer,\n    train_dataset=dataset,\n    dataset_text_field=\"text\",\n    max_seq_length=max_seq_length,\n    dataset_num_proc=2,\n    packing=True,\n    args=TrainingArguments(\n        learning_rate=3e-4,\n        lr_scheduler_type=\"linear\",\n        per_device_train_batch_size=4,\n        gradient_accumulation_steps=4,\n        num_train_epochs=1,\n        fp16=not is_bfloat16_supported(),\n        bf16=is_bfloat16_supported(),\n        logging_steps=1,\n        optim=\"adamw_8bit\",\n        weight_decay=0.01,\n        warmup_steps=10,\n        output_dir=\"output\",\n        seed=0,\n    ),\n)\n\ntrainer.train()\n\n# 4. Inference\n# Load model for inference\nmodel = FastLanguageModel.for_inference(model)\n\nmessages = [\n    {\"from\": \"human\", \"value\": \"Is 9.11 larger than 9.9?\"},\n]\ninputs = tokenizer.apply_chat_template(\n    messages,\n    tokenize=True,\n    add_generation_prompt=True,\n    return_tensors=\"pt\",\n).to(\"cuda\")\n\ntext_streamer = TextStreamer(tokenizer)\n_ = model.generate(input_ids=inputs, streamer=text_streamer, max_new_tokens=128, use_cache=True)\n\n# 5. 
Save trained model\nmodel.save_pretrained_merged(\"model\", tokenizer, save_method=\"merged_16bit\")\n#model.push_to_hub_merged(\"mlabonne\/FineLlama-3.1-8B\", tokenizer, save_method=\"merged_16bit\")\n\nmodel.save_pretrained_gguf(\"model\", tokenizer, \"q8_0\")\n#quant_methods = [\"q2_k\", \"q3_k_m\", \"q4_k_m\", \"q5_k_m\", \"q6_k\", \"q8_0\"]\n#for quant in quant_methods:\n#    model.push_to_hub_gguf(\"mlabonne\/FineLlama-3.1-8B-GGUF\", tokenizer, quant)\n    <\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\">\u5b9e\u9645\u8fd0\u884c\u7ed3\u679c<\/h2>\n\n\n\n<p>\u5355\u5361\uff0cRTX4090 \u5927\u6982\u9700\u89816\u4e2a\u534a\u5c0f\u65f6\u5de6\u53f3<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:sh decode:true \" >CUDA_VISIBLE_DEVICES=2 python test_ft.py\n\ud83e\udda5 Unsloth: Will patch your computer to enable 2x faster free finetuning.\n==((====))==  Unsloth 2024.8: Fast Llama patching. Transformers = 4.44.0.\n   \\\\   \/|    GPU: NVIDIA GeForce RTX 4090. Max memory: 23.988 GB. Platform = Linux.\nO^O\/ \\_\/ \\    Pytorch: 2.3.0+cu121. CUDA = 8.9. CUDA Toolkit = 12.1.\n\\        \/    Bfloat16 = TRUE. FA [Xformers = 0.0.26.post1. FA2 = False]\n \"-____-\"     Free Apache license: http:\/\/github.com\/unslothai\/unsloth\nUnsloth 2024.8 patched 32 layers with 32 QKV layers, 32 O layers and 32 MLP layers.\ntrainable params: 41,943,040 || all params: 8,072,204,288 || trainable%: 0.5196\nNone\nUnsloth: Will map &lt;|im_end|&gt; to EOS = &lt;|end_of_text|&gt;.\n\/home\/tony\/anaconda3\/envs\/unsloth\/lib\/python3.10\/site-packages\/transformers\/tokenization_utils_base.py:1601: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted in transformers v4.45, and will be then set to `False` by default. 
For more details check this issue: https:\/\/github.com\/huggingface\/transformers\/issues\/31884\n  warnings.warn(\n==((====))==  Unsloth - 2x faster free finetuning | Num GPUs = 1\n   \\\\   \/|    Num examples = 29,003 | Num Epochs = 1\nO^O\/ \\_\/ \\    Batch size per device = 4 | Gradient Accumulation steps = 4\n\\        \/    Total batch size = 16 | Total steps = 1,812\n \"-____-\"     Number of trainable parameters = 41,943,040\n{'loss': 1.198, 'grad_norm': 1.8002166748046875, 'learning_rate': 2.9999999999999997e-05, 'epoch': 0.0}\n{'loss': 1.1385, 'grad_norm': 1.5958515405654907, 'learning_rate': 5.9999999999999995e-05, 'epoch': 0.0}\n{'loss': 1.1491, 'grad_norm': 1.6960703134536743, 'learning_rate': 8.999999999999999e-05, 'epoch': 0.0}\n{'loss': 1.1331, 'grad_norm': 0.6497035622596741, 'learning_rate': 0.00011999999999999999, 'epoch': 0.0}\n{'loss': 1.0014, 'grad_norm': 0.45659250020980835, 'learning_rate': 0.00015, 'epoch': 0.0}\n...\n{'loss': 0.7404, 'grad_norm': 0.2969115376472473, 'learning_rate': 0.00019428412874583796, 'epoch': 0.36}\n{'loss': 0.7173, 'grad_norm': 0.26957347989082336, 'learning_rate': 0.0001941176470588235, 'epoch': 0.36}\n{'loss': 0.6269, 'grad_norm': 0.2675934433937073, 'learning_rate': 0.0001939511653718091, 'epoch': 0.36}\n...\n 36%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588                           | 647\/1812 [2:18:57&lt;4:09:20, 12.84s\/it]\n....<\/pre><\/div>\n","protected":false},"excerpt":{"rendered":"<p>\u6700\u5148\u8fdb\u7684\u76d1\u7763\u5fae\u8c03\u521d\u5b66\u8005\u6307\u5357 \u6700\u8fd1\u53d1\u5e03\u7684 Llama 3.1 \u4e3a\u6a21\u578b\u63d0\u4f9b\u4e86\u4ee4\u4eba\u96be\u4ee5\u7f6e\u4fe1\u7684\u6027\u80fd\u6c34\u5e73\uff0c\u7f29\u5c0f\u4e86\u95ed\u6e90\u548c\u5f00 
[&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"site-sidebar-layout":"default","site-content-layout":"","ast-site-content-layout":"default","site-content-style":"default","site-sidebar-style":"default","ast-global-header-display":"","ast-banner-title-visibility":"","ast-main-header-display":"","ast-hfb-above-header-display":"","ast-hfb-below-header-display":"","ast-hfb-mobile-header-display":"","site-post-title":"","ast-breadcrumbs-content":"","ast-featured-img":"","footer-sml-layout":"","theme-transparent-header-meta":"","adv-header-id-meta":"","stick-header-meta":"","header-above-stick-meta":"","header-main-stick-meta":"","header-below-stick-meta":"","astra-migrate-meta-layouts":"set","ast-page-background-enabled":"default","ast-page-background-meta":{"desktop":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"ast-content-background-meta":{"desktop":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[43],"tags":[535],"class_list":["post-4455","post","type-post","status-publish","format-standard","hentry","category-infoarticle","tag-llama3-1-8b"],"views":3686,"jetpack_sharing_enabled":true,"jetpack_featured_media_url":"","_links":{"self":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/4455","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=4455"}],"version-history":[{"count":16,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/4455\/revisions"}],"predecessor-version":[{"id":4478,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/4455\/revisions\/4478"}],"wp:attachment":[{"href":"https:\/\/www.aqwu.
net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=4455"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=4455"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=4455"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}