{"id":2544,"date":"2024-03-15T08:52:01","date_gmt":"2024-03-15T00:52:01","guid":{"rendered":"https:\/\/www.aqwu.net\/wp\/?p=2544"},"modified":"2024-03-15T09:48:04","modified_gmt":"2024-03-15T01:48:04","slug":"google-gemma-7b-%e7%9a%84%e5%87%a0%e7%a7%8d%e6%8e%a8%e7%90%86%e6%96%b9%e6%b3%95","status":"publish","type":"post","link":"https:\/\/www.aqwu.net\/wp\/?p=2544","title":{"rendered":"google\/gemma-7b \u7684\u51e0\u79cd\u63a8\u7406\u65b9\u6cd5"},"content":{"rendered":"\n<p>Gemma \u662f Google \u7684\u4e00\u7cfb\u5217\u8f7b\u91cf\u7ea7\u3001\u6700\u5148\u8fdb\u7684\u5f00\u653e\u5f0f\u6a21\u578b\uff0c \u57fa\u4e8e\u7528\u4e8e\u521b\u5efa\u53cc\u5b50\u5ea7\u6a21\u578b\uff08state-of-the-art\uff09\u7684\u76f8\u540c\u7814\u7a76\u548c\u6280\u672f\u6784\u5efa\u3002 \u5b83\u4eec\u662f\u6587\u672c\u5230\u6587\u672c\u3001\u4ec5\u89e3\u7801\u5668\u7684\u5927\u578b\u8bed\u8a00\u6a21\u578b\uff0c\u63d0\u4f9b\u82f1\u6587\u7248\u672c\u3001 \u5177\u6709\u5f00\u653e\u6743\u91cd\u3001\u9884\u8bad\u7ec3\u53d8\u4f53\u548c\u6307\u4ee4\u8c03\u6574\u53d8\u4f53\u3002Gemma \u6a21\u578b\u975e\u5e38\u9002\u5408\u5404\u79cd\u6587\u672c\u751f\u6210\u4efb\u52a1\uff0c\u5305\u62ec \u95ee\u7b54\u3001\u603b\u7ed3\u548c\u63a8\u7406\u3002\u5b83\u4eec\u7684\u5c3a\u5bf8\u76f8\u5bf9\u8f83\u5c0f \u53ef\u4ee5\u5c06\u5b83\u4eec\u90e8\u7f72\u5728\u8d44\u6e90\u6709\u9650\u7684\u73af\u5883\u4e2d\uff0c\u4f8b\u5982 \u7b14\u8bb0\u672c\u7535\u8111\u3001\u53f0\u5f0f\u673a\u6216\u60a8\u81ea\u5df1\u7684\u4e91\u57fa\u7840\u8bbe\u65bd\uff0c\u4f7f\u5bf9 \u6700\u5148\u8fdb\u7684 AI \u6a21\u578b\uff0c\u5e2e\u52a9\u4fc3\u8fdb\u6bcf\u4e2a\u4eba\u7684\u521b\u65b0\u3002<\/p>\n\n\n\n<p>\u4e0b\u9762\u6211\u4eec\u5c06\u5206\u4eab\u4e00\u4e9b\u5173\u4e8e\u5982\u4f55\u5feb\u901f\u5f00\u59cb\u8fd0\u884c\u6a21\u578b\u7684\u4ee3\u7801\u7247\u6bb5\u3002\u9996\u5148\u786e\u4fdd \uff0c\u7136\u540e\u4ece\u4e0e\u60a8\u7684\u7528\u4f8b\u76f8\u5173\u7684\u90e8\u5206\u590d\u5236\u4ee3\u7801\u6bb5\u3002<code>pip install -U transformers<\/code><\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">pip install -U transformers<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\"><strong>1. \u5728 CPU \u4e0a\u8fd0\u884c\u6a21\u578b<\/strong><\/h2>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from transformers import AutoTokenizer, AutoModelForCausalLM\n\ntokenizer = AutoTokenizer.from_pretrained(\"google\/gemma-7b\")\nmodel = AutoModelForCausalLM.from_pretrained(\"google\/gemma-7b\")\n\ninput_text = \"Write me a poem about Machine Learning.\"\ninput_ids = tokenizer(input_text, return_tensors=\"pt\")\n\noutputs = model.generate(**input_ids)\nprint(tokenizer.decode(outputs[0]))<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\"><strong>2. \u5728\u5355\u4e2a\/\u591a\u4e2a GPU \u4e0a\u8fd0\u884c\u6a21\u578b<\/strong><\/h2>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \"># pip install accelerate\nfrom transformers import AutoTokenizer, AutoModelForCausalLM\n\ntokenizer = AutoTokenizer.from_pretrained(\"google\/gemma-7b\")\nmodel = AutoModelForCausalLM.from_pretrained(\"google\/gemma-7b\", device_map=\"auto\")\n\ninput_text = \"Write me a poem about Machine Learning.\"\ninput_ids = tokenizer(input_text, return_tensors=\"pt\").to(\"cuda\")\n\noutputs = model.generate(**input_ids)\nprint(tokenizer.decode(outputs[0]))\n<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\"><strong>3. 
## 3. Running the model on a GPU with different precisions

### 3.1 Using `torch.float16`

```python
# pip install accelerate
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b", device_map="auto", torch_dtype=torch.float16)

input_text = "Write me a poem about Machine Learning."
input_ids = tokenizer(input_text, return_tensors="pt").to("cuda")

outputs = model.generate(**input_ids)
print(tokenizer.decode(outputs[0]))
```

### 3.2 Using `torch.bfloat16`

```python
# pip install accelerate
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b", device_map="auto", torch_dtype=torch.bfloat16)

input_text = "Write me a poem about Machine Learning."
input_ids = tokenizer(input_text, return_tensors="pt").to("cuda")

outputs = model.generate(**input_ids)
print(tokenizer.decode(outputs[0]))
```

## 4. Quantized versions through `bitsandbytes`

### 4.1 Using 8-bit precision (int8)

```python
# pip install bitsandbytes accelerate
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

quantization_config = BitsAndBytesConfig(load_in_8bit=True)

tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b", quantization_config=quantization_config)

input_text = "Write me a poem about Machine Learning."
input_ids = tokenizer(input_text, return_tensors="pt").to("cuda")

outputs = model.generate(**input_ids)
print(tokenizer.decode(outputs[0]))
```

### 4.2 Using 4-bit precision (int4)

```python
# pip install bitsandbytes accelerate
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

quantization_config = BitsAndBytesConfig(load_in_4bit=True)

tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b", quantization_config=quantization_config)

input_text = "Write me a poem about Machine Learning."
input_ids = tokenizer(input_text, return_tensors="pt").to("cuda")

outputs = model.generate(**input_ids)
print(tokenizer.decode(outputs[0]))
```
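`BitsAndBytesConfig` accepts more 4-bit options than the bare `load_in_4bit=True` shown above. Below is a hedged sketch of an NF4 configuration; the particular knob values are common community choices, not something this post prescribes:

```python
# Sketch: a more tuned 4-bit setup. Only the config changes; the model
# loads exactly as in section 4.2.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",              # NF4 usually preserves quality better than plain fp4
    bnb_4bit_compute_dtype=torch.bfloat16,  # matmuls run in bf16 even though weights are 4-bit
    bnb_4bit_use_double_quant=True,         # also quantize the quantization constants
)

tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
model = AutoModelForCausalLM.from_pretrained(
    "google/gemma-7b", quantization_config=quantization_config
)
```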
## 5. flash-attn

To use Flash Attention 2, install the `flash-attn` package and pass `attn_implementation="flash_attention_2"` when loading the model:

```python
# pip install flash-attn
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    attn_implementation="flash_attention_2",
).to(0)
```

## 6. Chat template

The instruction-tuned models use a chat template that must be adhered to for conversational use. The easiest way to apply it is using the tokenizer's built-in chat template, as the following snippet shows.

Let's load the model and apply the chat template to a conversation. In this example, we'll start with a single user interaction:

```python
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

model_id = "google/gemma-7b-it"
dtype = torch.bfloat16

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="cuda",
    torch_dtype=dtype,
)

chat = [
    { "role": "user", "content": "Write a hello world program" },
]
prompt = tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)
```

At this point, the prompt contains the following text:

```
<bos><start_of_turn>user
Write a hello world program<end_of_turn>
<start_of_turn>model
```

As you can see, each turn is preceded by a `<start_of_turn>` delimiter and then the role of the entity (either `user`, for content supplied by the user, or `model` for LLM responses). Turns finish with the `<end_of_turn>` token. You can follow this format to build the prompt manually, if you need to do it without the tokenizer's chat template.

After the prompt is ready, generation can be performed like this:

```python
inputs = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt")
outputs = model.generate(input_ids=inputs.to(model.device), max_new_tokens=150)
print(tokenizer.decode(outputs[0]))
```
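Note that the decoded output contains the prompt as well as the reply. One way to keep only the new tokens and continue the conversation for a second turn is sketched below; the slicing approach and the follow-up message are our own illustration, not part of the original post:

```python
# Sketch: slice off the prompt tokens to keep only the model's reply.
reply = tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True)

# Append the reply and a follow-up question, then re-apply the template.
# apply_chat_template expects the "assistant" role and renders it as "model".
chat.append({"role": "assistant", "content": reply})
chat.append({"role": "user", "content": "Now explain what the program does"})
prompt = tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)
```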
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[313,289,312],"tags":[242,314],"class_list":["post-2544","post","type-post","status-publish","format-standard","hentry","category-chatgpt","category-gpt","category-openai","tag-chatgpt","tag-openai-api"],"views":3981,"jetpack_sharing_enabled":true,"jetpack_featured_media_url":"","_links":{"self":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2544","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=2544"}],"version-history":[{"count":13,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2544\/revisions"}],"predecessor-version":[{"id":2559,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2544\/revisions\/2559"}],"wp:attachment":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=2544"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=2544"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=2544"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}