{"id":2720,"date":"2024-03-30T18:18:40","date_gmt":"2024-03-30T10:18:40","guid":{"rendered":"https:\/\/www.aqwu.net\/wp\/?p=2720"},"modified":"2024-04-28T20:00:54","modified_gmt":"2024-04-28T12:00:54","slug":"%e4%ba%86%e8%a7%a3-qwen-vl-%e6%a8%a1%e5%9e%8b","status":"publish","type":"post","link":"https:\/\/www.aqwu.net\/wp\/?p=2720","title":{"rendered":"\u4e86\u89e3 Qwen-VL \u6a21\u578b"},"content":{"rendered":"\n<p><strong>Qwen-VL<\/strong>&nbsp;\u662f\u963f\u91cc\u4e91\u7814\u53d1\u7684\u5927\u89c4\u6a21\u89c6\u89c9\u8bed\u8a00\u6a21\u578b\uff08Large Vision Language Model, LVLM\uff09\u3002Qwen-VL \u53ef\u4ee5\u4ee5\u56fe\u50cf\u3001\u6587\u672c\u3001\u68c0\u6d4b\u6846\u4f5c\u4e3a\u8f93\u5165\uff0c\u5e76\u4ee5\u6587\u672c\u548c\u68c0\u6d4b\u6846\u4f5c\u4e3a\u8f93\u51fa\u3002Qwen-VL \u7cfb\u5217\u6a21\u578b\u6027\u80fd\u5f3a\u5927\uff0c\u5177\u5907\u591a\u8bed\u8a00\u5bf9\u8bdd\u3001\u591a\u56fe\u4ea4\u9519\u5bf9\u8bdd\u7b49\u80fd\u529b\uff0c\u5e76\u652f\u6301\u4e2d\u6587\u5f00\u653e\u57df\u5b9a\u4f4d\u548c\u7ec6\u7c92\u5ea6\u56fe\u50cf\u8bc6\u522b\u4e0e\u7406\u89e3\u3002<\/p>\n\n\n\n<p>\u76ee\u524d\uff0c\u63d0\u4f9b\u4e86Qwen-VL\u548cQwen-VL-Chat\u4e24\u4e2a\u6a21\u578b\uff0c\u5206\u522b\u4e3a\u9884\u8bad\u7ec3\u6a21\u578b\u548cChat\u6a21\u578b\u3002\u5982\u679c\u60f3\u4e86\u89e3\u66f4\u591a\u5173\u4e8e\u6a21\u578b\u7684\u4fe1\u606f\uff0c\u8bf7\u70b9\u51fb<a href=\"https:\/\/github.com\/QwenLM\/Qwen-VL\/blob\/master\/visual_memo.md\">\u94fe\u63a5<\/a>\u67e5\u770b\u6211\u4eec\u7684\u6280\u672f\u5907\u5fd8\u5f55\u3002<\/p>\n\n\n\n<p>\u9700\u8981\u6267\u884c\u5982\u4e0b\u7684\u547d\u4ee4\uff1a<\/p>\n\n\n\n<div class=\"wp-block-urvanov-syntax-highlighter-code-block\"><pre class=\"lang:python decode:true \">pip install matplotlib tiktoken\npip install transformers_stream_generator<\/pre><\/div>\n\n\n\n<h2 class=\"wp-block-heading\"><strong>1. 
## 1. Loading Qwen-VL on a Single GPU

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
import time

torch.manual_seed(1234)

MODEL_NAME = "Qwen/Qwen-VL-Chat"

# Count the available GPUs
NUM_GPUS = torch.cuda.device_count()
print(f"NUM_GPUS: {NUM_GPUS}")

# Record the start timestamp
start_time = time.time()

# Load the tokenizer and the model, specifying the device and dtype
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)

# Use bf16
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME,
                                             trust_remote_code=True,
                                             bf16=True,
                                             device_map="cuda:0")

model = model.eval()

end_time = time.time()
elapsed_time = end_time - start_time
print(f"Load Model Time: {elapsed_time} seconds")

print(model)

start_time2 = time.time()

# 1st dialogue turn
query = tokenizer.from_list_format([
    {'image': 'https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-VL/assets/demo.jpeg'},
    {'text': '这是什么'},  # "What is this?"
])

response, history = model.chat(tokenizer, query=query, history=None)
print(response)
# Expected: a young woman playing with her dog on the beach; the dog, possibly
# a Labrador, raises a front paw as if high-fiving her, conveying trust and affection.

# 2nd dialogue turn
response, history = model.chat(tokenizer, '输出"击掌"的检测框', history=history)  # "Output the detection box for the high five"

end_time2 = time.time()
elapsed_time2 = end_time2 - start_time2

print(response)
# Expected: <ref>击掌</ref><box>(517,508),(589,611)</box>
image = tokenizer.draw_bbox_on_latest_picture(response, history)
if image:
    image.save('1.jpg')
else:
    print("no box")

print(f"Total Generation Time: {elapsed_time2} seconds")
```

Run results:

```
time python test05-VL.py
NUM_GPUS: 8
Loading checkpoint shards: 100%|█████████████| 10/10 [19:32<00:00, 117.30s/it]
Load Model Time: 1177.1278638839722 seconds
QWenLMHeadModel(
  (transformer): QWenModel(
    (wte): Embedding(151936, 4096)
    (drop): Dropout(p=0.0, inplace=False)
    (rotary_emb): RotaryEmbedding()
    (h): ModuleList(
      (0-31): 32 x QWenBlock(
        (ln_1): RMSNorm()
        (attn): QWenAttention(
          (c_attn): Linear(in_features=4096, out_features=12288, bias=True)
          (c_proj): Linear(in_features=4096, out_features=4096, bias=False)
          (attn_dropout): Dropout(p=0.0, inplace=False)
        )
        (ln_2): RMSNorm()
        (mlp): QWenMLP(
          (w1): Linear(in_features=4096, out_features=11008, bias=False)
          (w2): Linear(in_features=4096, out_features=11008, bias=False)
          (c_proj): Linear(in_features=11008, out_features=4096, bias=False)
        )
      )
    )
    (ln_f): RMSNorm()
    (visual): VisionTransformer(
      (conv1): Conv2d(3, 1664, kernel_size=(14, 14), stride=(14, 14), bias=False)
      (ln_pre): LayerNorm((1664,), eps=1e-06, elementwise_affine=True)
      (transformer): TransformerBlock(
        (resblocks): ModuleList(
          (0-47): 48 x VisualAttentionBlock(
            (ln_1): LayerNorm((1664,), eps=1e-06, elementwise_affine=True)
            (ln_2): LayerNorm((1664,), eps=1e-06, elementwise_affine=True)
            (attn): VisualAttention(
              (in_proj): Linear(in_features=1664, out_features=4992, bias=True)
              (out_proj): Linear(in_features=1664, out_features=1664, bias=True)
            )
            (mlp): Sequential(
              (c_fc): Linear(in_features=1664, out_features=8192, bias=True)
              (gelu): GELU(approximate='none')
              (c_proj): Linear(in_features=8192, out_features=1664, bias=True)
            )
          )
        )
      )
      (attn_pool): Resampler(
        (kv_proj): Linear(in_features=1664, out_features=4096, bias=False)
        (attn): MultiheadAttention(
          (out_proj): NonDynamicallyQuantizableLinear(in_features=4096, out_features=4096, bias=True)
        )
        (ln_q): LayerNorm((4096,), eps=1e-06, elementwise_affine=True)
        (ln_kv): LayerNorm((4096,), eps=1e-06, elementwise_affine=True)
      )
      (ln_post): LayerNorm((4096,), eps=1e-06, elementwise_affine=True)
    )
  )
  (lm_head): Linear(in_features=4096, out_features=151936, bias=False)
)
The image shows a woman playing with a dog on the beach; the dog beside her is a Labrador, and they are on the sand.
<ref>击掌</ref><box>(523,512),(588,605)</box>
Total Generation Time: 4.424967527389526 seconds

real    19m45.588s
user    0m12.085s
sys     0m29.553s
```

The image referenced in the code:

![Demo image: a woman and her dog high-fiving on the beach](https://www.aqwu.net/wp/wp-content/uploads/2024/03/demo-1024x683.jpeg)

The "high five" detection result (1.jpg):

![The same image with the detected high-five bounding box drawn](https://www.aqwu.net/wp/wp-content/uploads/2024/03/1-1024x683.jpg)
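The grounding output is also easy to post-process yourself. A minimal sketch for converting the returned box to pixel coordinates, assuming Qwen-VL's convention of reporting box corners on a 0-1000 normalized grid (so they must be scaled by the true image size); `parse_boxes` is an illustrative helper, not part of the Qwen-VL API:

```python
# Parse <box>(x1,y1),(x2,y2)</box> spans from a Qwen-VL response and scale the
# normalized 0-1000 coordinates to the actual image size (assumed convention).
import re

def parse_boxes(response: str, img_w: int, img_h: int):
    pattern = r'<box>\((\d+),(\d+)\),\((\d+),(\d+)\)</box>'
    boxes = []
    for x1, y1, x2, y2 in re.findall(pattern, response):
        boxes.append((int(x1) * img_w // 1000, int(y1) * img_h // 1000,
                      int(x2) * img_w // 1000, int(y2) * img_h // 1000))
    return boxes

# For a 2048x1365 image, (523,512),(588,605) maps to roughly (1071, 698)-(1204, 825).
print(parse_boxes('<ref>击掌</ref><box>(523,512),(588,605)</box>', 2048, 1365))
```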
## 2. Loading Qwen-VL Across Multiple GPUs

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
import time

torch.manual_seed(1234)

MODEL_NAME = "Qwen/Qwen-VL-Chat"

# Automatically distribute the model's components across multiple GPUs
def auto_configure_device_map(num_gpus: int):
    num_trans_layers = 32  # number of transformer layers in the model
    per_gpu_layers = num_trans_layers / num_gpus  # layers each GPU should carry
    # Pin specific modules to specific GPUs
    device_map = {
        'transformer.wte': 0,              # token embedding on the first GPU
        'transformer.visual': num_gpus-1,  # vision tower on the last GPU
        'transformer.ln_f': num_gpus-1,    # final normalization layer on the last GPU
        'lm_head': num_gpus-1              # LM head (next-token prediction) on the last GPU
    }
    # Assign each transformer layer to a GPU
    for i in range(num_trans_layers):
        device_map[f'transformer.h.{i}'] = int(i//per_gpu_layers)
    return device_map

# Count the available GPUs
NUM_GPUS = torch.cuda.device_count()
print(f"NUM_GPUS: {NUM_GPUS}")

# If GPUs are available, build the device map from the GPU count; otherwise skip it
device_map = auto_configure_device_map(NUM_GPUS) if NUM_GPUS > 0 else None

# Record the start timestamp
start_time = time.time()

# Load the tokenizer and the model, specifying the device map and dtype
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)

# Use bf16
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME,
                                             trust_remote_code=True,
                                             bf16=True,
                                             device_map=device_map)

model = model.eval()

end_time = time.time()
elapsed_time = end_time - start_time
print(f"Load Model Time: {elapsed_time} seconds")

print(model)

start_time2 = time.time()

# 1st dialogue turn
query = tokenizer.from_list_format([
    {'image': 'https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-VL/assets/demo.jpeg'},
    {'text': '这是什么'},  # "What is this?"
])

response, history = model.chat(tokenizer, query=query, history=None)
print(response)
# Expected: a young woman playing with her dog on the beach; the dog, possibly
# a Labrador, raises a front paw as if high-fiving her, conveying trust and affection.

# 2nd dialogue turn
response, history = model.chat(tokenizer, '输出"击掌"的检测框', history=history)  # "Output the detection box for the high five"

end_time2 = time.time()
elapsed_time2 = end_time2 - start_time2

print(response)
# Expected: <ref>击掌</ref><box>(517,508),(589,611)</box>
image = tokenizer.draw_bbox_on_latest_picture(response, history)
if image:
    image.save('2.jpg')
else:
    print("no box")

print(f"Total Generation Time: {elapsed_time2} seconds")
```

Run results:

```
time python test05-VL-2.py
NUM_GPUS: 8
Loading checkpoint shards: 100%|███████████| 10/10 [48:26<00:00, 290.68s/it]
Load Model Time: 2910.5123233795166 seconds
[model structure printout identical to the single-GPU run, omitted]
The image shows a woman playing with a dog on the beach; the dog beside her is a Labrador, and they are on the sand.
<ref>击掌</ref><box>(523,512),(588,605)</box>
Total Generation Time: 38.64623808860779 seconds

real    49m14.595s
user    0m34.058s
sys     0m50.434s
```
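To see how `auto_configure_device_map` splits the model before committing to a long load, you can preview the map it builds offline. A small sketch reusing the same logic from the script above (no GPUs or model download needed):

```python
# Preview the layer split for 8 GPUs without loading the model.
def auto_configure_device_map(num_gpus: int):
    num_trans_layers = 32
    per_gpu_layers = num_trans_layers / num_gpus
    device_map = {
        'transformer.wte': 0,
        'transformer.visual': num_gpus - 1,
        'transformer.ln_f': num_gpus - 1,
        'lm_head': num_gpus - 1,
    }
    for i in range(num_trans_layers):
        device_map[f'transformer.h.{i}'] = int(i // per_gpu_layers)
    return device_map

dm = auto_configure_device_map(8)
for gpu in range(8):
    modules = [name for name, dev in dm.items() if dev == gpu]
    print(f"GPU {gpu}: {modules}")
# GPU 0 holds transformer.h.0-3 plus the token embedding; GPU 7 holds
# transformer.h.28-31 plus the vision tower, final norm, and lm_head.
```

Note that the last GPU carries the entire vision tower in addition to its share of transformer layers, so it holds noticeably more parameters than the others.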
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[313,443,442,312],"tags":[],"class_list":["post-2720","post","type-post","status-publish","format-standard","hentry","category-chatgpt","category-llm","category-llms","category-openai"],"views":3312,"jetpack_sharing_enabled":true,"jetpack_featured_media_url":"","_links":{"self":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2720","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=2720"}],"version-history":[{"count":10,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2720\/revisions"}],"predecessor-version":[{"id":2735,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=\/wp\/v2\/posts\/2720\/revisions\/2735"}],"wp:attachment":[{"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=2720"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=2720"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.aqwu.net\/wp\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=2720"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}