{"id":4568,"date":"2024-08-23T14:45:51","date_gmt":"2024-08-23T06:45:51","guid":{"rendered":"https:\/\/www.aqwu.net\/wp\/?p=4568"},"modified":"2024-08-23T16:03:04","modified_gmt":"2024-08-23T08:03:04","slug":"ai21-jamba-1-5-%e7%b3%bb%e5%88%97%e6%a8%a1%e5%9e%8b","status":"publish","type":"post","link":"https:\/\/www.aqwu.net\/wp\/?p=4568","title":{"rendered":"AI21 Jamba 1.5 \u7cfb\u5217\u6a21\u578b"},"content":{"rendered":"\n<p>AI21 Jamba 1.5 \u7cfb\u5217\u6a21\u578b\u662f\u6700\u5148\u8fdb\u7684\u6df7\u5408 SSM-Transformer \u6307\u4ee4\uff0c\u9075\u5faa\u57fa\u7840\u6a21\u578b\u3002Jamba \u6a21\u578b\u662f\u5e02\u573a\u4e0a\u6700\u5f3a\u5927\u3001\u6700\u9ad8\u6548\u7684\u957f\u4e0a\u4e0b\u6587\u6a21\u578b\uff0c\u5176\u63a8\u7406\u901f\u5ea6\u6bd4\u540c\u7b49\u5927\u5c0f\u7684\u9886\u5148\u6a21\u578b\u5feb 2.5 \u500d\u3002<\/p>\n\n\n\n<p>\u8fd9\u4e9b\u6a21\u578b\u5c55\u793a\u4e86\u5353\u8d8a\u7684\u957f\u4e0a\u4e0b\u6587\u5904\u7406\u3001\u901f\u5ea6\u548c\u8d28\u91cf\u3002\u5b83\u4eec\u6807\u5fd7\u7740\u975e\u53d8\u538b\u5668\u6a21\u578b\u9996\u6b21\u6210\u529f\u5730\u6269\u5c55\u5230\u5e02\u573a\u9886\u5148\u6a21\u578b\u7684\u8d28\u91cf\u548c\u5f3a\u5ea6\u3002<\/p>\n\n\n\n<p><a href=\"https:\/\/huggingface.co\/ai21labs\/AI21-Jamba-1.5-Mini\">Jamba 1.5 Mini<\/a>\uff0812B \u6d3b\u52a8\/52B \u603b\u8ba1\uff09\u548c&nbsp;<a href=\"https:\/\/huggingface.co\/ai21labs\/AI21-Jamba-1.5-Large\">Jamba 1.5 Large<\/a>\uff0894B \u6d3b\u52a8\/398B \u603b\u8ba1\uff09\u8fd8\u9488\u5bf9\u4e1a\u52a1\u7528\u4f8b\u548c\u529f\u80fd\u8fdb\u884c\u4e86\u4f18\u5316\uff0c\u4f8b\u5982\u51fd\u6570\u8c03\u7528\u3001\u7ed3\u6784\u5316\u8f93\u51fa \uff08JSON\uff09 \u548c\u63a5\u5730\u751f\u6210\u3002<\/p>\n\n\n\n<p>\u8fd9\u4e9b\u6a21\u578b\u662f\u5728&nbsp;<a href=\"https:\/\/www.ai21.com\/licenses\/jamba-open-model-license\">Jamba 
\u5f00\u653e\u6a21\u578b\u8bb8\u53ef\u8bc1<\/a>\u4e0b\u53d1\u5e03\u7684\uff0c\u8be5\u8bb8\u53ef\u8bc1\u662f\u4e00\u79cd\u5bbd\u677e\u7684\u8bb8\u53ef\u8bc1\uff0c\u5141\u8bb8\u6839\u636e\u8bb8\u53ef\u6761\u6b3e\u8fdb\u884c\u5168\u9762\u7814\u7a76\u4f7f\u7528\u548c\u5546\u4e1a\u7528\u9014\u3002\u5982\u679c\u60a8\u9700\u8981\u6839\u636e\u81ea\u5df1\u7684\u9700\u6c42\u8bb8\u53ef\u8be5\u6a21\u578b\uff0c<a href=\"https:\/\/www.ai21.com\/talk-to-us\">\u8bf7\u4e0e\u6211\u4eec\u8054\u7cfb<\/a>\u3002<\/p>\n\n\n\n<h2 class=\"wp-block-heading\">Model Details&nbsp;&nbsp;\u578b\u53f7\u8be6\u7ec6\u4fe1\u606f<\/h2>\n\n\n\n<ul class=\"wp-block-list\">\n<li><strong>Developed by:<\/strong>&nbsp;<a href=\"https:\/\/www.ai21.com\/\">AI21<\/a><br><strong>\u5f00\u53d1\u8005\uff1a<\/strong>&nbsp;<a href=\"https:\/\/www.ai21.com\/\">AI21<\/a><\/li>\n\n\n\n<li><strong>Model type:<\/strong>&nbsp;Joint Attention and Mamba (Jamba)<br><strong>\u578b\u53f7\u7c7b\u578b\uff1a<\/strong>\u8054\u5408\u5173\u6ce8\u548c\u66fc\u5df4 \uff08Jamba\uff09<\/li>\n\n\n\n<li><strong>License:<\/strong>&nbsp;<a href=\"https:\/\/www.ai21.com\/licenses\/jamba-open-model-license\">Jamba Open Model License<\/a><br><strong>\u8bb8\u53ef\u8bc1\uff1a<\/strong><a href=\"https:\/\/www.ai21.com\/licenses\/jamba-open-model-license\">Jamba Open Model License<\/a><\/li>\n\n\n\n<li><strong>Context length:<\/strong>&nbsp;256K<br><strong>\u4e0a\u4e0b\u6587\u957f\u5ea6\uff1a<\/strong>256K<\/li>\n\n\n\n<li><strong>Knowledge cutoff date:<\/strong>&nbsp;March 5, 2024<br><strong>\u77e5\u8bc6\u622a\u6b62\u65e5\u671f\uff1a<\/strong>3\u6708 5\uff0c 2024<\/li>\n\n\n\n<li><strong>Supported languages:<\/strong>&nbsp;English, Spanish, French, Portuguese, Italian, Dutch, German, Arabic and 
Hebrew<br><strong>\u652f\u6301\u7684\u8bed\u8a00\uff1a<\/strong>\u82f1\u8bed\u3001\u897f\u73ed\u7259\u8bed\u3001\u6cd5\u8bed\u3001\u8461\u8404\u7259\u8bed\u3001\u610f\u5927\u5229\u8bed\u3001\u8377\u5170\u8bed\u3001\u5fb7\u8bed\u3001\u963f\u62c9\u4f2f\u8bed\u548c\u5e0c\u4f2f\u6765\u8bed<\/li>\n<\/ul>\n\n\n\n<h1 class=\"wp-block-heading\">\u7528\u6cd5<\/h1>\n\n\n\n<h2 class=\"wp-block-heading\"><a href=\"https:\/\/huggingface.co\/ai21labs\/AI21-Jamba-1.5-Mini#prerequisites\"><\/a>\u5148\u51b3\u6761\u4ef6<\/h2>\n\n\n\n<p>\u4e3a\u4e86\u8fd0\u884c\u4f18\u5316\u7684 Mamba \u5b9e\u73b0\uff0c\u60a8\u9996\u5148\u9700\u8981\u5b89\u88c5\u5e76\uff1a<code>mamba-ssm<\/code><code>causal-conv1d<\/code><\/p>\n\n\n\n<p>\u60a8\u8fd8\u5fc5\u987b\u5728 CUDA \u8bbe\u5907\u4e0a\u62e5\u6709\u6a21\u578b\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:sh decode:true \">pip install -U mamba-ssm \npip install -U causal-conv1d\n<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\"><a href=\"https:\/\/huggingface.co\/ai21labs\/AI21-Jamba-1.5-Mini#run-the-model-with-vllm\"><\/a>\u4f7f\u7528 vLLM \u8fd0\u884c\u6a21\u578b<\/h2>\n\n\n\n<p>\u4f7f\u7528 Jamba 1.5 Mini \u6267\u884c\u9ad8\u6548\u63a8\u7406\u7684\u63a8\u8350\u65b9\u6cd5\u662f\u4f7f\u7528&nbsp;<a href=\"https:\/\/docs.vllm.ai\/en\/latest\/\">vLLM<\/a>\u3002\u9996\u5148\uff0c\u786e\u4fdd\u5b89\u88c5 vLLM\uff08\u9700\u8981 0.5.4 \u6216\u66f4\u9ad8\u7248\u672c\uff09<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:sh decode:true \">pip install -U vllm\n<\/pre><\/div>\n\n\n\n<p>\u8fd9\u91cc\u5efa\u8bae\u4ece\u6e90\u7801\u6784\u5efa vLLM<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:sh decode:true \">git clone https:\/\/github.com\/vllm-project\/vllm.git\ncd vllm\npip install -e .  
# This may take 5-10 minutes.\n<\/pre><\/div>\n\n\n\n<p>\u5728\u4e0b\u9762\u7684\u793a\u4f8b\u4e2d\uff0c<code>number_gpus<\/code>\u5e94\u4e0e\u8981\u90e8\u7f72 Jamba 1.5 Mini \u7684 GPU \u6570\u91cf\u5339\u914d\u3002\u81f3\u5c11\u9700\u8981 2 \u4e2a 80GB GPU\uff0c\u6216\u662f8\u4e2a24G GPU.<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from vllm import LLM, SamplingParams\nfrom transformers import AutoTokenizer\n\nmodel = \"ai21labs\/AI21-Jamba-1.5-Mini\"\nnumber_gpus = 2\n\nllm = LLM(model=model,\n          max_model_len=200*1024,\n          tensor_parallel_size=number_gpus)\n\ntokenizer = AutoTokenizer.from_pretrained(model)\n\nmessages = [\n   {\"role\": \"system\", \"content\": \"You are an ancient oracle who speaks in cryptic but wise phrases, always hinting at deeper meanings.\"},\n   {\"role\": \"user\", \"content\": \"Hello!\"},\n]\n\nprompts = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)\n\nsampling_params = SamplingParams(temperature=0.4, top_p=0.95, max_tokens=100) \noutputs = llm.generate(prompts, sampling_params)\n\ngenerated_text = outputs[0].outputs[0].text\nprint(generated_text)\n#Output: Seek and you shall find. The path is winding, but the journey is enlightening. 
What wisdom do you seek from the ancient echoes?\n<\/pre><\/div>\n\n\n\n<p><strong>\u57288\u4e2a RTX 4090(24G)\u7684\u73af\u5883\u4e0b\uff0cmax_model_len \u9700\u8981\u51cf\u5c11\uff0cmax_model_len \u8d8a\u5927\uff0c\u65f6\u95f4\u8d8a\u957f\u3002\u6a21\u578b\u572858k\u7684\u60c5\u51b5\u4e0b\u52a0\u8f7d\u65f6\u95f4\u5927\u6982\u534a\u4e2a\u5c0f\u65f6\u5de6\u53f3\uff0c<\/strong><\/p>\n\n\n\n<p>\u572824G GPU \u5185\u5b58\u4e0b\uff0cmax_model_len \u8fc7\u5927\u5219\u65e0\u6cd5\u6267\u884c\uff0c\u4e0b\u9762\u662f\u4e00\u4e2a\u7b80\u5355\u7684\u5bf9\u8bdd\u4ee3\u7801\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from vllm import LLM, SamplingParams\nfrom transformers import AutoTokenizer\n\nmodel = \"ai21labs\/AI21-Jamba-1.5-Mini\"\nnumber_gpus = 8\n\nllm = LLM(model=model,\n          max_model_len=58*1024, # 200*1024,\n          tensor_parallel_size=number_gpus)\n\ntokenizer = AutoTokenizer.from_pretrained(model)\n\n# \u521d\u59cb\u5316\u7cfb\u7edf\u6d88\u606f\nsystem_message = {\"role\": \"system\", \"content\": \"You are an ancient oracle who speaks in cryptic but wise phrases, always hinting at deeper meanings.\"}\n\nwhile True:\n    # \u83b7\u53d6\u7528\u6237\u8f93\u5165\n    user_input = input(\"You: \")\n    \n    # \u5982\u679c\u8f93\u5165\u4e3a\u7a7a\u6216\u8005\u7528\u6237\u8f93\u5165 'exit' \u5219\u9000\u51fa\u5faa\u73af\n    if not user_input.strip() or user_input.lower() == \"exit\":\n        print(\"Exiting...\")\n        break\n\n    # \u6784\u5efa\u6d88\u606f\u5217\u8868\n    messages = [\n        system_message,\n        {\"role\": \"user\", \"content\": user_input},\n    ]\n\n    # \u751f\u6210 prompts\n    prompts = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)\n\n    # \u8bbe\u7f6e\u751f\u6210\u53c2\u6570\n    sampling_params = SamplingParams(temperature=0.4, top_p=0.95, max_tokens=100) \n\n    # \u751f\u6210\u56de\u7b54\n    outputs = 
llm.generate(prompts, sampling_params)\n\n    # \u83b7\u53d6\u5e76\u8f93\u51fa\u751f\u6210\u7684\u6587\u672c\n    generated_text = outputs[0].outputs[0].text\n    print(f\"Oracle: {generated_text}\")\n<\/pre><\/div>\n\n\n\n<p>\u4f7f\u7528 2 \u4e2a 80GB A100 GPU \u4e0a\u7684\u9ed8\u8ba4 BF16 \u7cbe\u5ea6\u548c\u9ed8\u8ba4\u7684 vLLM \u914d\u7f6e\uff0c\u60a8\u5c06\u80fd\u591f\u5bf9\u957f\u8fbe 200K \u4ee4\u724c\u7684\u63d0\u793a\u6267\u884c\u63a8\u7406\u3002\u5728\u8d85\u8fc7 2 \u4e2a 80GB GPU \u4e0a\uff0c\u60a8\u53ef\u4ee5\u8f7b\u677e\u9002\u5e94\u5b8c\u6574\u7684 256K \u73af\u5883\u3002<\/p>\n\n\n\n<p>\u6ce8\u610f\uff1avLLM&nbsp;<code>\u7684\u4e3b<\/code>\u5206\u652f\u6709\u4e00\u4e9b\u7279\u5b9a\u4e8e Jamba \u67b6\u6784\u7684\u5185\u5b58\u5229\u7528\u7387\u6539\u8fdb\uff0c\u5141\u8bb8\u5728 2 \u4e2a 80 GPU \u4e0a\u4f7f\u7528\u5b8c\u6574\u7684 256K \u4e0a\u4e0b\u6587\u957f\u5ea6\u3002\u5982\u679c\u60a8\u5e0c\u671b\u4f7f\u7528\u5b83\u4eec\uff0c\u60a8\u53ef\u4ee5\u4ece<a href=\"https:\/\/docs.vllm.ai\/en\/latest\/getting_started\/installation.html#build-from-source\">\u6e90\u4ee3\u7801\u6784\u5efa vLLM<\/a>\u3002<\/p>\n\n\n\n<h3 class=\"wp-block-heading\">ExpertsInt8 \u91cf\u5316<\/h3>\n\n\n\n<p>\u6211\u4eec\u5f00\u53d1\u4e86\u4e00\u79cd\u521b\u65b0\u4e14\u9ad8\u6548\u7684\u91cf\u5316\u6280\u672f ExpertsInt8\uff0c\u4e13\u4e3a vLLM \u4e2d\u90e8\u7f72\u7684 MoE \u6a21\u578b\uff08\u5305\u62ec Jamba \u6a21\u578b\uff09\u800c\u8bbe\u8ba1\u3002\u4f7f\u7528\u5b83\uff0c\u60a8\u5c06\u80fd\u591f\u5728\u5355\u4e2a80 GB GPU \u4e0a\u90e8\u7f72 Jamba 1.5 Mini \u3002<\/p>\n\n\n\n<p>ExpertsInt8 \u5728\u6700\u65b0\u7684 vLLM \u7248\u672c\u4e0a\u5c1a\u4e0d\u53ef\u7528\uff0c\u4f46\u5b83\u5df2\u5408\u5e76\u5230&nbsp;<code>main<\/code>&nbsp;\u5206\u652f\u3002\u8981\u4f7f\u7528\u5b83\uff0c<a href=\"https:\/\/docs.vllm.ai\/en\/latest\/getting_started\/installation.html#build-from-source\">\u8bf7\u4ece\u6e90\u4ee3\u7801\u6784\u5efa 
vLLM<\/a>\u3002<\/p>\n\n\n\n<p>\u4f7f\u7528\u9ed8\u8ba4\u7684 vLLM \u914d\u7f6e\uff0c\u60a8\u53ef\u4ee5\u5728\u5355\u4e2a 80GB A100 GPU \u4e0a\u5b89\u88c5\u9ad8\u8fbe 100K \u7684\u63d0\u793a\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">import os\nos.environ['VLLM_FUSED_MOE_CHUNK_SIZE']='32768'    # This is a workaround a bug in vLLM's fused_moe kernel\n\nfrom vllm import LLM\nllm = LLM(model=\"ai21labs\/AI21-Jamba-1.5-Mini\",\n          max_model_len=100*1024,\n          quantization=\"experts_int8\")\n<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\">\u4f7f\u7528<code>transformers<\/code>\u8fd0\u884c\u6a21\u578b<\/h2>\n\n\n\n<p>\u4ee5\u4e0b\u793a\u4f8b\u4ee5 BF16 \u7cbe\u5ea6\u5c06 Jamba 1.5 Mini \u52a0\u8f7d\u5230 GPU\uff0c\u4f7f\u7528\u4f18\u5316\u7684&nbsp;<a href=\"https:\/\/github.com\/Dao-AILab\/flash-attention\">FlashAttention2<\/a>&nbsp;\u548c Mamba \u5185\u6838\uff0c\u5e76\u4f7f\u7528&nbsp;<a href=\"https:\/\/huggingface.co\/docs\/accelerate\/index\">Accelerate<\/a>&nbsp;\u5728\u591a\u4e2a GPU \u4e0a\u5e76\u884c\u5316\u6a21\u578b\u3002\u8bf7\u6ce8\u610f\uff0c\u5728\u534a\u7cbe\u5ea6 \uff08FP16\/BF16\uff09 \u4e0b\uff0cJamba 1.5 Mini \u592a\u5927\uff0c\u65e0\u6cd5\u5b89\u88c5\u5728\u5355\u4e2a 80GB GPU \u4e0a\uff0c\u56e0\u6b64\u60a8\u81f3\u5c11\u9700\u8981 2 \u4e2a\u8fd9\u6837\u7684 GPU\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">import torch\nfrom transformers import AutoModelForCausalLM, AutoTokenizer\n\nmodel = AutoModelForCausalLM.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\",\n                                             torch_dtype=torch.bfloat16,\n                                             attn_implementation=\"flash_attention_2\",\n                                             device_map=\"auto\")\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\n\nmessages = [\n 
  {\"role\": \"system\", \"content\": \"You are an ancient oracle who speaks in cryptic but wise phrases, always hinting at deeper meanings.\"},\n   {\"role\": \"user\", \"content\": \"Hello!\"},\n]\n\ninput_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors='pt').to(model.device)\n\noutputs = model.generate(input_ids, max_new_tokens=216)\n\n# Decode the output\nconversation = tokenizer.decode(outputs[0], skip_special_tokens=True)\n\n# Split the conversation to get only the assistant's response\nassistant_response = conversation.split(messages[-1]['content'])[1].strip()\nprint(assistant_response)\n# Output: Seek and you shall find. The path is winding, but the journey is enlightening. What wisdom do you seek from the ancient echoes?\n<\/pre><\/div>\n\n\n\n<p>\u6ce8\u610f\uff1a<code>transformers <\/code>\u7248\u672c 4.44.0 \u548c 4.44.1 \u5b58\u5728\u4e00\u4e2a bug\uff0c\u8be5 bug \u9650\u5236\u4e86\u8fd0\u884c Jamba \u67b6\u6784\u7684\u80fd\u529b\u3002\u8bf7\u786e\u4fdd\u60a8\u672a\u4f7f\u7528\u8fd9\u4e9b\u7248\u672c\u3002<\/p>\n\n\n\n<p>\u6ce8\u610f\uff1a\u5982\u679c\u60a8\u5728\u4e3a\u4f18\u5316\u7684 Mamba \u5185\u6838\u5b89\u88c5&nbsp;<code>mamba-ssm<\/code>&nbsp;\u548c&nbsp;<code>causal-conv1d<\/code>&nbsp;\u65f6\u9047\u5230\u95ee\u9898\uff0c\u60a8\u53ef\u4ee5\u5728\u6ca1\u6709\u5b83\u4eec\u7684\u60c5\u51b5\u4e0b\u8fd0\u884c Jamba 1.5 Mini\uff0c\u4f46\u4ee3\u4ef7\u662f\u989d\u5916\u7684\u5ef6\u8fdf\u3002\u4e3a\u6b64\uff0c\u8bf7\u5728\u901a\u8fc7&nbsp;<code>AutoModelForCausalLM.from_pretained\uff08\uff09<\/code>&nbsp;\u52a0\u8f7d\u6a21\u578b\u65f6\u6dfb\u52a0 kwarg&nbsp;<code>use_mamba_kernels=False<\/code>\u3002<\/p>\n\n\n\n<h3 class=\"wp-block-heading\"><strong>\u4ee5 8 \u4f4d\u52a0\u8f7d\u6a21\u578b<\/strong><\/h3>\n\n\n\n<p><strong>\u4f7f\u7528 8 \u4f4d\u7cbe\u5ea6\uff0c\u53ef\u4ee5\u5728\u5355\u4e2a 80GB GPU \u4e0a\u9002\u5e94\u9ad8\u8fbe 140K 
\u7684\u5e8f\u5217\u957f\u5ea6\u3002<\/strong>\u60a8\u53ef\u4ee5\u4f7f\u7528&nbsp;<a href=\"https:\/\/huggingface.co\/docs\/bitsandbytes\/index\">bitsandbytes<\/a>&nbsp;\u8f7b\u677e\u5730\u5c06\u6a21\u578b\u91cf\u5316\u4e3a 8 \u4f4d\u3002\u4e3a\u4e86\u4e0d\u964d\u4f4e\u6a21\u578b\u8d28\u91cf\uff0c\u6211\u4eec\u5efa\u8bae\u4ece\u91cf\u5316\u4e2d\u6392\u9664 Mamba \u5757\uff1a<\/p>\n\n\n\n<p><strong>\u4f7f\u7528 8 \u4f4d\u7cbe\u5ea6\uff0c\u53ef\u4ee5\u5728\u5355\u4e2a 80GB GPU \u4e0a\u9002\u5e94\u9ad8\u8fbe 140K \u7684\u5e8f\u5217\u957f\u5ea6\u3002<\/strong>\u60a8\u53ef\u4ee5\u4f7f\u7528&nbsp;<a href=\"https:\/\/huggingface.co\/docs\/bitsandbytes\/index\">bitsandbytes<\/a>&nbsp;\u8f7b\u677e\u5730\u5c06\u6a21\u578b\u91cf\u5316\u4e3a 8 \u4f4d\u3002\u4e3a\u4e86\u4e0d\u964d\u4f4e\u6a21\u578b\u8d28\u91cf\uff0c\u6211\u4eec\u5efa\u8bae\u4ece\u91cf\u5316\u4e2d\u6392\u9664 Mamba \u5757\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from transformers import AutoModelForCausalLM, BitsAndBytesConfig\nquantization_config = BitsAndBytesConfig(load_in_8bit=True,\n                                         llm_int8_skip_modules=[\"mamba\"])\nmodel = AutoModelForCausalLM.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\",\n                                             torch_dtype=torch.bfloat16,\n                                             attn_implementation=\"flash_attention_2\",\n                                             quantization_config=quantization_config)\n<\/pre><\/div>\n\n\n\n<h3 class=\"wp-block-heading\"><strong>\u5728 CPU \u4e0a\u52a0\u8f7d\u6a21\u578b<\/strong><\/h3>\n\n\n\n<p>\u5982\u679c\u60a8\u65e0\u6cd5\u8bbf\u95ee GPU\uff0c\u60a8\u8fd8\u53ef\u4ee5\u5728 CPU \u4e0a\u52a0\u8f7d\u548c\u8fd0\u884c Jamba 1.5 Mini\u3002\u8bf7\u6ce8\u610f\uff0c\u8fd9\u5c06\u5bfc\u81f4\u63a8\u7406\u6027\u80fd\u4e0d\u4f73\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre 
class=\"lang:python decode:true \">from transformers import AutoModelForCausalLM\nmodel = AutoModelForCausalLM.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\",\n                                             use_mamba_kernels=False)\n<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\">\u6a21\u578b\u7279\u6027<\/h2>\n\n\n\n<h3 class=\"wp-block-heading\">\u4e0e Jamba \u4e00\u8d77\u4f7f\u7528\u7684\u5de5\u5177<\/h3>\n\n\n\n<p>Jamba 1.5 \u6839\u636e Huggingface \u7684\u5de5\u5177\u4f7f\u7528 API\uff0c\u652f\u6301\u5de5\u5177\u4f7f\u7528\u80fd\u529b\u3002\u7528\u6237\u5b9a\u4e49\u7684\u5de5\u5177\u5165\u5230\u804a\u5929\u6a21\u677f\u7684\u4e13\u7528\u90e8\u5206\u4e2d\uff0c\u6a21\u578b\u7ecf\u8fc7\u8bad\u7ec3\u53ef\u4ee5\u8bc6\u522b\u8be5\u90e8\u5206\u3002<\/p>\n\n\n\n<p>\u7ed9\u5b9a\u5305\u542b\u5de5\u5177\u7684\u5bf9\u8bdd\uff0c\u6a21\u578b\u53ef\u4ee5\u8f93\u51fa\u5185\u5bb9\u548c\/\u6216\u5de5\u5177\u8c03\u7528\u3002\u5de5\u5177\u8c03\u7528\u5728\u52a9\u624b\u6d88\u606f\u4e2d\u88ab\u683c\u5f0f\u5316\u4e3a json \u683c\u5f0f\u7684\u8bcd\u5178\u5217\u8868\uff0c\u5305\u88c5\u5728\u4e13\u7528\u7684\u7279\u6b8a\u4ee4\u724c\u4e2d\uff0c\u5982\u4ee5\u4e0b\u793a\u4f8b\u6240\u793a\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\n\nmessages = [\n    {\n        \"role\": \"user\", \n        \"content\": \"What's the weather like right now in Jerusalem and in London?\"\n    }\n]\n\ntools = [\n    {\n        'type': 'function', \n        'function': {\n            'name': 'get_current_weather', \n            'description': 'Get the current weather', \n            'parameters': {\n                'type': 'object', \n                'properties': {\n                    'location': {'type': 'string', 'description': 'The city and state, e.g. 
San Francisco, CA'}, \n                    'format': {'type': 'string', 'enum': ['celsius', 'fahrenheit'], 'description': 'The temperature unit to use. Infer this from the users location.'}\n                }, \n                'required': ['location', 'format']\n            }\n        }\n    }\n]\n\nprompt = tokenizer.apply_chat_template(\n    messages,\n    tools=tools,\n    tokenize=False,\n)\n<\/pre><\/div>\n\n\n\n<p>&nbsp;<br>\u8f93\u51fa\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">&lt;tool_calls&gt;[\n    {\"name\": \"get_current_weather\", \"arguments\": {\"location\": \"Jerusalem\", \"format\": \"celsius\"}},\n    {\"name\": \"get_current_weather\", \"arguments\": {\"location\": \"London\", \"format\": \"celsius\"}}\n]&lt;\/tool_calls&gt;\n<\/pre><\/div>\n\n\n\n<h3 class=\"wp-block-heading\"><strong>\u5c06\u5de5\u5177\u54cd\u5e94\u53cd\u9988\u5230\u6a21\u578b\u4e2d<\/strong><\/h3>\n\n\n\n<p>\u73b0\u5728\u6a21\u578b\u8c03\u7528\u4e86\u5de5\u5177\uff0c\u6211\u4eec\u9700\u8981\u5c06\u5de5\u5177\u54cd\u5e94\u53cd\u9988\u7ed9\u6a21\u578b\u3002\u60a8\u53ef\u4ee5\u4ece\u6a21\u578b\u7684\u54cd\u5e94\u4e2d\u89e3\u6790\u5de5\u5177\u8c03\u7528\uff0c\u5e76\u5728 assistant \u6d88\u606f\u4e2d\u4f20\u64ad&nbsp;<code>tool_calls<\/code>&nbsp;\u5b57\u6bb5\uff0c\u6216\u8005\u53ea\u662f\u5c06\u683c\u5f0f\u5316\u7684\u54cd\u5e94\u4fdd\u7559\u5728&nbsp;<code>content<\/code>&nbsp;\u5b57\u6bb5\u4e2d\u3002\u8bf7\u6ce8\u610f\uff0c\u54cd\u5e94\u7684\u987a\u5e8f\u5e94\u4e0e\u76f8\u5e94 Assistant \u6d88\u606f\u4e2d\u5de5\u5177\u8c03\u7528\u7684\u987a\u5e8f\u4e00\u81f4\u3002\u8fd9\u90e8\u5206\u4ecb\u7ecd\u5982\u4f55\u64cd\u4f5c\u3002<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\n\n# Note that you must send the tool responses 
in the same order as the model called the tools:\nmessages = [\n    {\n        \"role\": \"user\",\n        \"content\": \"What's the weather like right now in Jerusalem and in London?\"\n    },\n    {\n        \"role\": \"assistant\",\n        \"content\": None,\n        \"tool_calls\": [\n            {\n                \"name\": \"get_current_weather\",\n                \"arguments\": \"{\\\"location\\\": \\\"Jerusalem\\\", \\\"format\\\": \\\"celsius\\\"}\"\n            },\n            {\n                \"name\": \"get_current_weather\",\n                \"arguments\": \"{\\\"location\\\": \\\"London\\\", \\\"format\\\": \\\"celsius\\\"}\"\n            }\n        ]\n    },\n    {\n        \"role\": \"tool\",\n        \"content\": \"The weather in Jerusalem is 18 degrees celsius.\"\n    },\n    {\n        \"role\": \"tool\",\n        \"content\": \"The weather in London is 8 degrees celsius.\"\n    }\n]\n\ntool_use_prompt = tokenizer.apply_chat_template(\n    messages,\n    tools=tools,\n    tokenize=False,\n)\n<\/pre><\/div>\n\n\n\n<p>\u8f93\u51fa\u793a\u4f8b\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">The weather in Jerusalem is currently 18 degrees Celsius. 
In London, it is 8 degrees Celsius.\n<\/pre><\/div>\n\n\n\n<h3 class=\"wp-block-heading\">\u4e0eJamba\u63a5\u5730\u7684\u4e00\u4ee3<\/h3>\n\n\n\n<p>\u4e00\u4e2a\u5e38\u89c1\u7684\u7528\u4f8bLLMs\u662f\u63a5\u5730\u751f\u6210\u548c RAG\uff0c\u5176\u4e2d\u6a21\u578b\u9700\u8981\u6839\u636e\u7ed9\u5b9a\u7684\u6587\u6863\u96c6\u6216\u6587\u6863\u7247\u6bb5\u56de\u7b54\u95ee\u9898\u6216\u9075\u5faa\u8bf4\u660e\u3002\u4e3a\u4e86\u6807\u51c6\u5316\u8fd9\u4e00\u8fc7\u7a0b\uff0cJamba \u5728\u5176\u804a\u5929\u6a21\u677f\u4e2d\u63a5\u53d7\u4e86\u7279\u5b9a\u7684 \u201cdocuments\u201d \u90e8\u5206\u7684\u8bad\u7ec3\u3002\u8be5\u6a21\u578b\u7ecf\u8fc7\u8bad\u7ec3\u6765\u5904\u7406\u6b64\u90e8\u5206\uff0c\u5f53\u4ee5\u8fd9\u79cd\u65b9\u5f0f\u683c\u5f0f\u5316\u4efb\u52a1\u65f6\uff0c\u63a5\u5730\u751f\u6210\u4efb\u52a1\u663e\u793a\u51fa\u66f4\u597d\u7684\u6027\u80fd\u3002<\/p>\n\n\n\n<p>\u4e0e\u5de5\u5177\u7c7b\u4f3c\uff0c\u9664\u4e86\u5bf9\u8bdd\u4e4b\u5916\uff0c\u5de5\u5177\u8fd8\u4f5c\u4e3a\u6a21\u578b\u7684\u5916\u90e8\u53c2\u6570\u63d0\u4f9b\uff0c\u6587\u6863\u4e5f\u4ee5\u7c7b\u4f3c\u7684\u65b9\u5f0f\u63d0\u4f9b\u3002\u4e3a\u4e86\u652f\u6301\u6587\u6863\u7ea7\u5143\u6570\u636e\uff0c\u6587\u6863\u88ab\u5b9a\u4e49\u4e3a\u5177\u6709\u60a8\u9009\u62e9\u7684\u952e\u503c\u7684\u5b57\u5178\u3002\u8fd9\u4e9b\u5728 chat \u6a21\u677f\u4e2d\u8fdb\u884c\u4e86\u683c\u5f0f\u5316\u3002\u83b7\u5f97\u7279\u6b8a\u5904\u7406\u7684\u4e24\u4e2a\u952e\u662f \u201ctitle\u201d \u548c \u201ctext\u201d \uff0c\u524d\u8005\u7684\u683c\u5f0f\u663e\u793a\u5728\u6587\u6863\u9876\u90e8\uff0c\u540e\u8005\u662f\u5fc5\u586b\u5b57\u6bb5\uff0c\u7528\u4e8e\u5b9a\u4e49\u6587\u6863\u7684\u5b9e\u9645\u6587\u672c\u3002<\/p>\n\n\n\n<h3 class=\"wp-block-heading\"><strong data-immersive-translate-walked=\"59178417-c37e-40b5-8559-9a888acf8f9d\" data-immersive-translate-paragraph=\"1\" style=\"box-sizing: border-box; border-width: 0px; border-style: solid; border-color: rgb(229, 231, 235); --tw-border-spacing-x: 0; 
--tw-border-spacing-y: 0; --tw-translate-x: 0; --tw-translate-y: 0; --tw-rotate: 0; --tw-skew-x: 0; --tw-skew-y: 0; --tw-scale-x: 1; --tw-scale-y: 1; --tw-pan-x: ; --tw-pan-y: ; --tw-pinch-zoom: ; --tw-scroll-snap-strictness: proximity; --tw-gradient-from-position: ; --tw-gradient-via-position: ; --tw-gradient-to-position: ; --tw-ordinal: ; --tw-slashed-zero: ; --tw-numeric-figure: ; --tw-numeric-spacing: ; --tw-numeric-fraction: ; --tw-ring-inset: ; --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-color: rgb(59 130 246 \/ .5); --tw-ring-offset-shadow: 0 0 #0000; --tw-ring-shadow: 0 0 #0000; --tw-shadow: 0 0 #0000; --tw-shadow-colored: 0 0 #0000; --tw-blur: ; --tw-brightness: ; --tw-contrast: ; --tw-grayscale: ; --tw-hue-rotate: ; --tw-invert: ; --tw-saturate: ; --tw-sepia: ; --tw-drop-shadow: ; --tw-backdrop-blur: ; --tw-backdrop-brightness: ; --tw-backdrop-contrast: ; --tw-backdrop-grayscale: ; --tw-backdrop-hue-rotate: ; --tw-backdrop-invert: ; --tw-backdrop-opacity: ; --tw-backdrop-saturate: ; --tw-backdrop-sepia: ; --tw-contain-size: ; --tw-contain-layout: ; --tw-contain-paint: ; --tw-contain-style: ; font-weight: 600; color: rgb(31 41 55 \/ var(--tw-text-opacity)); --tw-text-opacity: 1; font-family: &quot;Source Sans Pro&quot;, ui-sans-serif, system-ui, sans-serif, &quot;Apple Color Emoji&quot;, &quot;Segoe UI Emoji&quot;, &quot;Segoe UI Symbol&quot;, &quot;Noto Color Emoji&quot;; font-size: 16.8px; font-style: normal; font-variant-ligatures: normal; font-variant-caps: normal; letter-spacing: normal; orphans: 2; text-align: start; text-indent: 0px; text-transform: none; widows: 2; word-spacing: 0px; -webkit-text-stroke-width: 0px; white-space: normal; background-color: rgb(255, 255, 255); text-decoration-thickness: initial; text-decoration-style: initial; text-decoration-color: initial;\"><font class=\"notranslate immersive-translate-target-wrapper\" data-immersive-translate-translation-element-mark=\"1\" lang=\"zh-CN\" style=\"box-sizing: 
border-box; border-width: 0px; border-style: solid; border-color: rgb(229, 231, 235); --tw-border-spacing-x: 0; --tw-border-spacing-y: 0; --tw-translate-x: 0; --tw-translate-y: 0; --tw-rotate: 0; --tw-skew-x: 0; --tw-skew-y: 0; --tw-scale-x: 1; --tw-scale-y: 1; --tw-pan-x: ; --tw-pan-y: ; --tw-pinch-zoom: ; --tw-scroll-snap-strictness: proximity; --tw-gradient-from-position: ; --tw-gradient-via-position: ; --tw-gradient-to-position: ; --tw-ordinal: ; --tw-slashed-zero: ; --tw-numeric-figure: ; --tw-numeric-spacing: ; --tw-numeric-fraction: ; --tw-ring-inset: ; --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-color: rgb(59 130 246 \/ .5); --tw-ring-offset-shadow: 0 0 #0000; --tw-ring-shadow: 0 0 #0000; --tw-shadow: 0 0 #0000; --tw-shadow-colored: 0 0 #0000; --tw-blur: ; --tw-brightness: ; --tw-contrast: ; --tw-grayscale: ; --tw-hue-rotate: ; --tw-invert: ; --tw-saturate: ; --tw-sepia: ; --tw-drop-shadow: ; --tw-backdrop-blur: ; --tw-backdrop-brightness: ; --tw-backdrop-contrast: ; --tw-backdrop-grayscale: ; --tw-backdrop-hue-rotate: ; --tw-backdrop-invert: ; --tw-backdrop-opacity: ; --tw-backdrop-saturate: ; --tw-backdrop-sepia: ; --tw-contain-size: ; --tw-contain-layout: ; --tw-contain-paint: ; --tw-contain-style: ; word-break: break-word;\"><font class=\"notranslate immersive-translate-target-translation-theme-none immersive-translate-target-translation-block-wrapper-theme-none immersive-translate-target-translation-block-wrapper\" data-immersive-translate-translation-element-mark=\"1\" style=\"box-sizing: border-box; border-width: 0px; border-style: solid; border-color: rgb(229, 231, 235); --tw-border-spacing-x: 0; --tw-border-spacing-y: 0; --tw-translate-x: 0; --tw-translate-y: 0; --tw-rotate: 0; --tw-skew-x: 0; --tw-skew-y: 0; --tw-scale-x: 1; --tw-scale-y: 1; --tw-pan-x: ; --tw-pan-y: ; --tw-pinch-zoom: ; --tw-scroll-snap-strictness: proximity; --tw-gradient-from-position: ; --tw-gradient-via-position: ; --tw-gradient-to-position: ; 
--tw-ordinal: ; --tw-slashed-zero: ; --tw-numeric-figure: ; --tw-numeric-spacing: ; --tw-numeric-fraction: ; --tw-ring-inset: ; --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-color: rgb(59 130 246 \/ .5); --tw-ring-offset-shadow: 0 0 #0000; --tw-ring-shadow: 0 0 #0000; --tw-shadow: 0 0 #0000; --tw-shadow-colored: 0 0 #0000; --tw-blur: ; --tw-brightness: ; --tw-contrast: ; --tw-grayscale: ; --tw-hue-rotate: ; --tw-invert: ; --tw-saturate: ; --tw-sepia: ; --tw-drop-shadow: ; --tw-backdrop-blur: ; --tw-backdrop-brightness: ; --tw-backdrop-contrast: ; --tw-backdrop-grayscale: ; --tw-backdrop-hue-rotate: ; --tw-backdrop-invert: ; --tw-backdrop-opacity: ; --tw-backdrop-saturate: ; --tw-backdrop-sepia: ; --tw-contain-size: ; --tw-contain-layout: ; --tw-contain-paint: ; --tw-contain-style: ; display: inline-block; margin: 8px 0px !important;\"><font class=\"notranslate immersive-translate-target-inner immersive-translate-target-translation-theme-none-inner\" data-immersive-translate-translation-element-mark=\"1\" style=\"box-sizing: border-box; border-width: 0px; border-style: solid; border-color: rgb(229, 231, 235); --tw-border-spacing-x: 0; --tw-border-spacing-y: 0; --tw-translate-x: 0; --tw-translate-y: 0; --tw-rotate: 0; --tw-skew-x: 0; --tw-skew-y: 0; --tw-scale-x: 1; --tw-scale-y: 1; --tw-pan-x: ; --tw-pan-y: ; --tw-pinch-zoom: ; --tw-scroll-snap-strictness: proximity; --tw-gradient-from-position: ; --tw-gradient-via-position: ; --tw-gradient-to-position: ; --tw-ordinal: ; --tw-slashed-zero: ; --tw-numeric-figure: ; --tw-numeric-spacing: ; --tw-numeric-fraction: ; --tw-ring-inset: ; --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-color: rgb(59 130 246 \/ .5); --tw-ring-offset-shadow: 0 0 #0000; --tw-ring-shadow: 0 0 #0000; --tw-shadow: 0 0 #0000; --tw-shadow-colored: 0 0 #0000; --tw-blur: ; --tw-brightness: ; --tw-contrast: ; --tw-grayscale: ; --tw-hue-rotate: ; --tw-invert: ; --tw-saturate: ; --tw-sepia: ; --tw-drop-shadow: 
; --tw-backdrop-blur: ; --tw-backdrop-brightness: ; --tw-backdrop-contrast: ; --tw-backdrop-grayscale: ; --tw-backdrop-hue-rotate: ; --tw-backdrop-invert: ; --tw-backdrop-opacity: ; --tw-backdrop-saturate: ; --tw-backdrop-sepia: ; --tw-contain-size: ; --tw-contain-layout: ; --tw-contain-paint: ; --tw-contain-style: ; font-family: inherit;\">\u5c06\u6587\u6863\u9644\u52a0\u5230 Jamba 1.5 \u63d0\u793a\u7b26<\/font><\/font><\/font><\/strong><\/h3>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\n\nmessages = [\n        {\n            \"role\": \"user\",\n            \"content\": \"Who wrote Harry Potter?\"\n        }\n]\n\ndocuments = [\n        {\n            \"text\": \"Harry Potter is a series of seven fantasy novels written by British author J. K. Rowling.\",\n            \"title\": \"Harry Potter\"\n        },\n        {\n            \"text\": \"The Great Gatsby is a novel by American writer F. Scott Fitzgerald.\",\n            \"title\": \"The Great Gatsby\",\n            \"country\": \"United States\",\n            \"genre\": \"Novel\"\n\n        }\n]\n\nprompt = tokenizer.apply_chat_template(\n    messages,\n    documents=documents,\n    tokenize=False,\n)\n\n# Output: J. K. 
Rowling\n<\/pre><\/div>\n\n\n\n<h3 class=\"wp-block-heading\">JSON \u6a21\u5f0f<\/h3>\n\n\n\n<p>Jamba 1.5 \u4f7f\u7528\u7279\u5b9a\u7684 \u201c\u65cb\u94ae\u201d \u8fdb\u884c\u8bad\u7ec3\uff0c\u8fd9\u6709\u52a9\u4e8e\u5f15\u5bfc\u6a21\u578b\u5b9e\u73b0\u5e38\u89c1\u7684\u8bf7\u6c42\u884c\u4e3a\u3002\u901a\u8fc7\u5728\u7cfb\u7edf\u6d88\u606f\u4e2d\u5305\u542b\u7279\u5b9a\u7684\u9884\u5b9a\u4e49\u6587\u672c\u6765\u542f\u7528\u6bcf\u79cd\u884c\u4e3a\u3002\u4e3a\u4e86\u4fbf\u4e8e\u4f7f\u7528\uff0c\u6211\u4eec\u5df2\u5c06\u5b83\u4eec\u4f5c\u4e3a\u6807\u5fd7\u5305\u542b\u5728 Jamba 1.5 \u7684\u804a\u5929\u6a21\u677f\u4e2d\uff0c\u56e0\u6b64\u53ef\u4ee5\u901a\u8fc7\u5411\u804a\u5929\u6a21\u677f\u4f20\u9012\u9002\u5f53\u7684\u53c2\u6570\u6765\u5207\u6362\u5b83\u4eec\u3002<\/p>\n\n\n\n<p>Jamba 1.5 \u7ecf\u8fc7\u8bad\u7ec3\uff0c\u53ef\u4ee5\u5728\u8bf7\u6c42\u65f6\u751f\u6210\u6709\u6548\u7684 JSON\u3002\u5b83\u81ea\u7136\u800c\u7136\u5730\u8fd9\u6837\u505a\uff0c\u4f46\u662f\u5f53 JSON \u6a21\u5f0f\u65cb\u94ae\u88ab\u6fc0\u6d3b\u65f6\uff0c\u6709\u6548 json \u7684\u53ef\u80fd\u6027\u4f1a\u5927\u5927\u589e\u52a0\u3002\u5728 JSON \u6a21\u5f0f\u4e0b\uff0cJamba 1.5 \u5c06\u5c1d\u8bd5\u8f93\u51fa\u6709\u6548\u7684 JSON\uff0c\u800c\u4e0d\u7ba1\u7528\u6237\u8bf7\u6c42\u5982\u4f55\u3002\u4f46\u662f\uff0c\u5f3a\u70c8\u5efa\u8bae\u5728\u7528\u6237\u8bf7\u6c42\u6216\u7cfb\u7edf\u6d88\u606f\u4e2d\u6307\u5b9a\u6709\u5173\u9884\u671f json \u67b6\u6784\u7684\u4fe1\u606f\uff0c\u4ee5\u83b7\u5f97\u6700\u4f73\u7ed3\u679c\uff0c\u5982\u4ee5\u4e0b\u793a\u4f8b\u6240\u793a\u3002<\/p>\n\n\n\n<p><strong>Jamba 1.5 \u4e2d JSON \u65cb\u94ae\u7684\u4f7f\u7528<\/strong><\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">from transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\nmessages = [\n    {'role':'user', \n     'content':'Describe the first American president. 
Include year of birth (number) and name (string).'}\n    ]\nprompt = tokenizer.apply_chat_template(messages,\n                                       tokenize=False,\n                                       add_generation_prompt=False,\n                                       knobs={\"response_format\": \"json_object\", \"is_set\": True})\n\n#Output: \"{ \"year of birth\": 1732, \"name\": \"George Washington.\" }\"\n<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\">\u5fae\u8c03\u793a\u4f8b<\/h2>\n\n\n\n<p>\u4ee5\u4e0b\u793a\u4f8b\u4f7f\u7528&nbsp;<a href=\"https:\/\/github.com\/huggingface\/trl\">huggingface\/trl<\/a>&nbsp;\u4e2d\u7684&nbsp;<code>SFTTrainer<\/code>\uff0c\u56e0\u6b64\u8bf7\u786e\u4fdd\u5df2\u5b89\u88c5\u5b83\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">pip install trl\n<\/pre><\/div>\n\n\n\n<p>\u4ee5\u4e0b\u662f\u5728 bfloat16 \u4e2d\u4f7f\u7528 LoRA PEFT \u8fdb\u884c\u5fae\u8c03\u7684\u793a\u4f8b\uff08\u9700\u8981 ~130GB GPU RAM\uff0c\u4f8b\u5982 2xA100 80GB\uff09\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">import torch\nfrom transformers import AutoTokenizer, AutoModelForCausalLM\nfrom datasets import load_dataset\nfrom trl import SFTTrainer, SFTConfig\nfrom peft import LoraConfig\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\nmodel = AutoModelForCausalLM.from_pretrained(\n    \"ai21labs\/AI21-Jamba-1.5-Mini\",\n    device_map=\"auto\",\n    torch_dtype=torch.bfloat16,\n    attn_implementation=\"flash_attention_2\",\n)\n\nlora_config = LoraConfig(\n    r=8,\n    target_modules=[\n        \"embed_tokens\",\n        \"x_proj\", \"in_proj\", \"out_proj\", # mamba\n        \"gate_proj\", \"up_proj\", \"down_proj\", # mlp\n        \"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\", # attention\n    ],\n    task_type=\"CAUSAL_LM\",\n    bias=\"none\",\n)\n\ndataset = 
load_dataset(\"philschmid\/dolly-15k-oai-style\", split=\"train\")\ntraining_args = SFTConfig(\n    output_dir=\"\/dev\/shm\/results\",\n    logging_dir=\".\/logs\",\n    num_train_epochs=2,\n    per_device_train_batch_size=4,\n    learning_rate=1e-5,\n    logging_steps=10,\n    gradient_checkpointing=True,\n    max_seq_length=4096,\n    save_steps=100,\n)\ntrainer = SFTTrainer(\n    model=model,\n    tokenizer=tokenizer,\n    args=training_args,\n    peft_config=lora_config,\n    train_dataset=dataset,\n)\ntrainer.train()\n<\/pre><\/div>\n\n\n\n<p>\u8bf7\u6ce8\u610f\uff0c\u793a\u4f8b\u4e2d\u7684\u6570\u636e\u96c6\u4f7f\u7528\u5bf9\u8bdd\u683c\u5f0f\uff08\u5e26\u6709<code>messages<\/code>\u5217\uff09\uff0c\u56e0\u6b64&nbsp;<code>SFTTrainer<\/code>&nbsp;\u4f1a\u81ea\u52a8\u5e94\u7528 Jamba \u7684\u804a\u5929\u6a21\u677f\uff0c\u5982&nbsp;<a href=\"https:\/\/huggingface.co\/docs\/trl\/main\/en\/sft_trainer#dataset-format-support\">TRL \u6587\u6863<\/a>\u4e2d\u6240\u8ff0\u3002<\/p>\n\n\n\n<h3 class=\"wp-block-heading\">QLoRA \u793a\u4f8b<\/h3>\n\n\n\n<p>\u8981\u5728\u5355\u4e2a 80GB GPU \u4e0a\u8fdb\u884c\u5fae\u8c03\uff0c\u60a8\u53ef\u4ee5\u4f7f\u7528&nbsp;<a href=\"https:\/\/arxiv.org\/abs\/2305.14314\">QLoRA<\/a>\uff0c\u5b83\u5c06 LoRA \u4e0e\u91cf\u5316\u4e3a 4 \u4f4d\u7684\u51bb\u7ed3\u6a21\u578b\u76f8\u7ed3\u5408\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">import torch\nfrom transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig\nfrom datasets import load_dataset\nfrom trl import SFTTrainer, SFTConfig\nfrom peft import LoraConfig\n\ntokenizer = AutoTokenizer.from_pretrained(\"ai21labs\/AI21-Jamba-1.5-Mini\")\nquantization_config = BitsAndBytesConfig(\n    load_in_4bit=True,\n    bnb_4bit_quant_type=\"nf4\",\n    bnb_4bit_compute_dtype=torch.bfloat16,\n)\nmodel = AutoModelForCausalLM.from_pretrained(\n    \"ai21labs\/AI21-Jamba-1.5-Mini\",\n    
device_map=\"auto\",\n    quantization_config=quantization_config,\n    torch_dtype=torch.bfloat16,\n    attn_implementation=\"flash_attention_2\",\n)\nlora_config = LoraConfig(\n    r=8,\n    target_modules=[\n        \"embed_tokens\", \"x_proj\", \"in_proj\", \"out_proj\", # mamba\n        \"gate_proj\", \"up_proj\", \"down_proj\",  # mlp\n        \"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\", # attention\n    ],\n    task_type=\"CAUSAL_LM\",\n    bias=\"none\",\n)\n\ndataset = load_dataset(\"philschmid\/dolly-15k-oai-style\", split=\"train\")\ntraining_args = SFTConfig(\n    output_dir=\".\/results\",\n    logging_dir=\".\/logs\",\n    num_train_epochs=2,\n    per_device_train_batch_size=8,\n    learning_rate=1e-5,\n    logging_steps=1,\n    gradient_checkpointing=True,\n    gradient_checkpointing_kwargs={\"use_reentrant\": False},\n    save_steps=100,\n    max_seq_length=4096,\n)\ntrainer = SFTTrainer(\n    model=model,\n    tokenizer=tokenizer,\n    args=training_args,\n    peft_config=lora_config,\n    train_dataset=dataset,\n)\ntrainer.train()\n<\/pre><\/div>\n\n\n\n<p>\u6ce8\u610f\uff1a\u4e0a\u9762\u7684\u793a\u4f8b\u9700\u8981 4 \u4f4d\u91cf\u5316\u7684&nbsp;<code>bitsandbytes<\/code>&nbsp;\u5305\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:sh decode:true \">pip install bitsandbytes\n<\/pre><\/div>\n","protected":false},"excerpt":{"rendered":"<p>AI21 Jamba 1.5 \u7cfb\u5217\u6a21\u578b\u662f\u6700\u5148\u8fdb\u7684\u6df7\u5408 SSM-Transformer \u6307\u4ee4\uff0c\u9075\u5faa\u57fa\u7840\u6a21\u578b\u3002Ja 
[&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"site-sidebar-layout":"default","site-content-layout":"","ast-site-content-layout":"default","site-content-style":"default","site-sidebar-style":"default","ast-global-header-display":"","ast-banner-title-visibility":"","ast-main-header-display":"","ast-hfb-above-header-display":"","ast-hfb-below-header-display":"","ast-hfb-mobile-header-display":"","site-post-title":"","ast-breadcrumbs-content":"","ast-featured-img":"","footer-sml-layout":"","theme-transparent-header-meta":"","adv-header-id-meta":"","stick-header-meta":"","header-above-stick-meta":"","header-main-stick-meta":"","header-below-stick-meta":"","astra-migrate-meta-layouts":"set","ast-page-background-enabled":"default","ast-page-background-meta":{"desktop":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"ast-content-background-meta":{"desktop":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[444,445,443,442],"tags":[399],"class_list":["post-4568","post","type-post","status-publish","format-standard","hentry","category-ai","category-ainews","category-llm","category-llms","tag-jamba"],"views":2545,"jetpack_sharing_enabled":true,"jetpack_featured_media_url":"","_links":{"self":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/4568","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=4568"}],"version-history":[{"count":17,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/4568\/revisions"}],"predecessor-version":[{"id":4588,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/4568\/revisions\/4588"}
],"wp:attachment":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=4568"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=4568"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=4568"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}