{"id":2853,"date":"2024-04-05T23:42:36","date_gmt":"2024-04-05T15:42:36","guid":{"rendered":"https:\/\/www.aqwu.net\/wp\/?p=2853"},"modified":"2024-04-28T19:59:03","modified_gmt":"2024-04-28T11:59:03","slug":"%e4%ba%86%e8%a7%a3-cohereforai-c4ai-command-r-plus","status":"publish","type":"post","link":"https:\/\/www.aqwu.net\/wp\/?p=2853","title":{"rendered":"\u4e86\u89e3 CohereForAI\/c4ai-command-r-plus"},"content":{"rendered":"\n<p>C4AI Command R+ \u662f 104B \u4ebf\u53c2\u6570\u6a21\u578b\u7684\u5f00\u653e\u6743\u91cd\u7814\u7a76\u7248\u672c\uff0c\u5177\u6709\u9ad8\u5ea6\u5148\u8fdb\u7684\u529f\u80fd\uff0c\u5176\u4e2d\u5305\u62ec\u68c0\u7d22\u589e\u5f3a\u751f\u6210 \uff08RAG\uff09 \u548c\u7528\u4e8e\u81ea\u52a8\u6267\u884c\u590d\u6742\u4efb\u52a1\u7684\u5de5\u5177\u3002\u6b64\u6a21\u578b\u751f\u6210\u4e2d\u7684\u5de5\u5177\u4f7f\u7528\u652f\u6301\u591a\u6b65\u9aa4\u5de5\u5177\u4f7f\u7528\uff0c\u8fd9\u5141\u8bb8\u6a21\u578b\u5728\u591a\u4e2a\u6b65\u9aa4\u4e2d\u7ec4\u5408\u591a\u4e2a\u5de5\u5177\u4ee5\u5b8c\u6210\u56f0\u96be\u7684\u4efb\u52a1\u3002C4AI Command R+ \u662f\u4e00\u4e2a\u591a\u8bed\u8a00\u6a21\u578b\uff0c\u4ee5 10 \u79cd\u8bed\u8a00\u8bc4\u4f30\u6027\u80fd\uff1a\u82f1\u8bed\u3001\u6cd5\u8bed\u3001\u897f\u73ed\u7259\u8bed\u3001\u610f\u5927\u5229\u8bed\u3001\u5fb7\u8bed\u3001\u5df4\u897f\u8461\u8404\u7259\u8bed\u3001\u65e5\u8bed\u3001\u97e9\u8bed\u3001\u963f\u62c9\u4f2f\u8bed\u548c\u7b80\u4f53\u4e2d\u6587\u3002Command R+ \u9488\u5bf9\u5404\u79cd\u7528\u4f8b\u8fdb\u884c\u4e86\u4f18\u5316\uff0c\u5305\u62ec\u63a8\u7406\u3001\u6458\u8981\u548c\u95ee\u7b54\u3002<\/p>\n\n\n\n<p>\u6a21\u578b\u5927\u5c0f\uff1a1040\u4ebf\u4e2a\u53c2\u6570<\/p>\n\n\n\n<p>\u4e0a\u4e0b\u6587\u957f\u5ea6\uff1a128K<\/p>\n\n\n\n<p>\u6709\u975e\u91cf\u5316\u7248\u672c\uff1a<a href=\"https:\/\/huggingface.co\/CohereForAI\/c4ai-command-r-plus\">CohereForAI\/c4ai-command-r-plus<\/a><\/p>\n\n\n\n<p>\u548c\u91cf\u5316\u7248\u672c\uff1a<a href=\"https:\/\/huggingface.co\/CohereForAI\/c4ai-command-r-plus-4bit\">c4ai-command-r-plus-4bit<\/a><\/p>\n\n\n\n<p>\u9700\u8981\u66f4\u65b0\u5230\u6700\u65b0\u7684 transformers<\/p>\n\n\n\n<p>pip install &#8216;git+https:\/\/github.com\/huggingface\/transformers.git&#8217; bitsandbytes accelerate<\/p>\n\n\n\n<h2 class=\"wp-block-heading\"><strong>1. 
## 1. c4ai-command-r-plus-4bit

The code below was tested under WSL2, on a GPU with 6 GB of VRAM.

You need the latest transformers. Looking at the contents of config.json:

```json
{
  "_name_or_path": "CohereForAI/c4ai-command-r-plus",
  "architectures": [
    "CohereForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 5,
  "eos_token_id": 255001,
  "hidden_act": "silu",
  "hidden_size": 12288,
  "initializer_range": 0.02,
  "intermediate_size": 33792,
  "layer_norm_eps": 1e-05,
  "logit_scale": 0.8333333333333334,
  "max_position_embeddings": 8192,
  "model_max_length": 131072,
  "model_type": "cohere",
  "num_attention_heads": 96,
  "num_hidden_layers": 64,
  "num_key_value_heads": 8,
  "pad_token_id": 0,
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "float16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "fp4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "rope_theta": 75000000.0,
  "torch_dtype": "float16",
  "transformers_version": "4.40.0.dev0",
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 256000
}
```

From this we can see: "transformers_version": "4.40.0.dev0"

```python
# test01.py
from transformers import AutoModelForCausalLM

model_id = "CohereForAI/c4ai-command-r-plus-4bit"
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="cpu")
print(model)
```

Run result:

```
python test01.py
Unused kwargs: ['_load_in_4bit', '_load_in_8bit', 'quant_method']. These kwargs are not used in <class 'transformers.utils.quantization_config.BitsAndBytesConfig'>.
Loading checkpoint shards: 100%|█████████████████| 13/13 [00:07<00:00,  1.69it/s]
CohereForCausalLM(
  (model): CohereModel(
    (embed_tokens): Embedding(256000, 12288, padding_idx=0)
    (layers): ModuleList(
      (0-63): 64 x CohereDecoderLayer(
        (self_attn): CohereSdpaAttention(
          (q_norm): CohereLayerNorm()
          (k_norm): CohereLayerNorm()
          (q_proj): Linear4bit(in_features=12288, out_features=12288, bias=False)
          (k_proj): Linear4bit(in_features=12288, out_features=1024, bias=False)
          (v_proj): Linear4bit(in_features=12288, out_features=1024, bias=False)
          (o_proj): Linear4bit(in_features=12288, out_features=12288, bias=False)
          (rotary_emb): CohereRotaryEmbedding()
        )
        (mlp): CohereMLP(
          (gate_proj): Linear4bit(in_features=12288, out_features=33792, bias=False)
          (up_proj): Linear4bit(in_features=12288, out_features=33792, bias=False)
          (down_proj): Linear4bit(in_features=33792, out_features=12288, bias=False)
          (act_fn): SiLU()
        )
        (input_layernorm): CohereLayerNorm()
      )
    )
    (norm): CohereLayerNorm()
  )
  (lm_head): Linear(in_features=12288, out_features=256000, bias=False)
)
```
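The printed module tree, together with the config values above, lets us sanity-check the 104B parameter figure. A rough count in plain Python, ignoring the (tiny) layer norms and counting the embedding matrix once, on the assumption that lm_head is tied to embed_tokens as in the transformers Cohere implementation:

```python
# Rough parameter count from config.json / the printed module tree.
hidden, inter, layers, vocab = 12288, 33792, 64, 256000
kv_dim = 1024  # out_features of k_proj / v_proj above

attn = 2 * hidden * hidden + 2 * hidden * kv_dim  # q_proj, o_proj, k_proj, v_proj
mlp = 3 * hidden * inter                          # gate_proj, up_proj, down_proj
total = layers * (attn + mlp) + vocab * hidden    # decoder stack + embedding

print(f"{total / 1e9:.1f}B")  # -> 103.8B, consistent with "104B parameters"
```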
## 2. c4ai-command-r-plus

Looking at the config.json file, again "transformers_version": "4.40.0.dev0":

```json
{
  "architectures": [
    "CohereForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 5,
  "eos_token_id": 255001,
  "hidden_act": "silu",
  "hidden_size": 12288,
  "initializer_range": 0.02,
  "intermediate_size": 33792,
  "layer_norm_eps": 1e-05,
  "logit_scale": 0.8333333333333334,
  "max_position_embeddings": 8192,
  "model_max_length": 131072,
  "model_type": "cohere",
  "num_attention_heads": 96,
  "num_hidden_layers": 64,
  "num_key_value_heads": 8,
  "pad_token_id": 0,
  "rope_theta": 75000000.0,
  "torch_dtype": "float16",
  "transformers_version": "4.40.0.dev0",
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 256000
}
```

The script for this checkpoint (test02.py, listed below) is essentially identical to test01.py above; only the model id changes. The execution behaves differently, though: the quantized 4-bit version uses both GPU memory and CPU memory, while the non-quantized version does not use GPU memory at all, only CPU memory.
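If you want to control that GPU/CPU split explicitly rather than rely on the defaults, an accelerate-style device map can do it. A minimal sketch, assuming the 4-bit checkpoint; the `max_memory` budgets are illustrative values for a 6 GB card plus system RAM, not measurements from this post:

```python
from transformers import AutoModelForCausalLM

model_id = "CohereForAI/c4ai-command-r-plus-4bit"

# device_map="auto" lets accelerate spread layers across devices;
# max_memory caps each device. The budgets below are illustrative
# assumptions for a 6 GB GPU plus system RAM.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    max_memory={0: "5GiB", "cpu": "60GiB"},
)
print(model.hf_device_map)  # which device each layer ended up on
```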
```python
# test02.py
from transformers import AutoModelForCausalLM

model_id = "CohereForAI/c4ai-command-r-plus"

model = AutoModelForCausalLM.from_pretrained(model_id, device_map="cpu")
print(model)
```

Run result:

```
python test02.py
Loading checkpoint shards: 100%|█████████████████| 44/44 [39:00<00:00, 53.20s/it]
CohereForCausalLM(
  (model): CohereModel(
    (embed_tokens): Embedding(256000, 12288, padding_idx=0)
    (layers): ModuleList(
      (0-63): 64 x CohereDecoderLayer(
        (self_attn): CohereSdpaAttention(
          (q_norm): CohereLayerNorm()
          (k_norm): CohereLayerNorm()
          (q_proj): Linear(in_features=12288, out_features=12288, bias=False)
          (k_proj): Linear(in_features=12288, out_features=1024, bias=False)
          (v_proj): Linear(in_features=12288, out_features=1024, bias=False)
          (o_proj): Linear(in_features=12288, out_features=12288, bias=False)
          (rotary_emb): CohereRotaryEmbedding()
        )
        (mlp): CohereMLP(
          (gate_proj): Linear(in_features=12288, out_features=33792, bias=False)
          (up_proj): Linear(in_features=12288, out_features=33792, bias=False)
          (down_proj): Linear(in_features=33792, out_features=12288, bias=False)
          (act_fn): SiLU()
        )
        (input_layernorm): CohereLayerNorm()
      )
    )
    (norm): CohereLayerNorm()
  )
  (lm_head): Linear(in_features=12288, out_features=256000, bias=False)
)
```
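To put a number on the size difference between the two checkpoints, transformers models expose `get_memory_footprint()`, which sums the bytes of the loaded weights. A quick self-contained check against the 4-bit checkpoint (swap in the non-quantized model id to compare):

```python
from transformers import AutoModelForCausalLM

# get_memory_footprint() is a standard transformers PreTrainedModel method.
model = AutoModelForCausalLM.from_pretrained(
    "CohereForAI/c4ai-command-r-plus-4bit", device_map="cpu"
)
print(f"{model.get_memory_footprint() / 2**30:.1f} GiB")
```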
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"ast-content-background-meta":{"desktop":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[443,442],"tags":[415,414,416,417],"class_list":["post-2853","post","type-post","status-publish","format-standard","hentry","category-llm","category-llms","tag-c4ai-command-r-plus","tag-cohereforai","tag-416","tag-417"],"views":1879,"jetpack_sharing_enabled":true,"jetpack_featured_media_url":"","_links":{"self":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2853","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=2853"}],"version-history":[{"count":17,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2853\/revisions"}],"predecessor-version":[{"id":2873,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2853\/revisions\/2873"}],"wp:attachment":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=2853"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=2853"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=2853"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}