{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"27059cd40bcd4afc854a55ae0379b1fb": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_1314ca44da874e73bdb06a0b49c2724c",
"IPY_MODEL_2a72ec72acd44ff3aa21eed08f8dd48a",
"IPY_MODEL_63f78fdb30a54a0696cdd5f9de2e6106"
],
"layout": "IPY_MODEL_587a0aa796444aa1a0922e95dc24db6c"
}
},
"1314ca44da874e73bdb06a0b49c2724c": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_befed88b0fcf4ed1a894ac8cbdcd5d86",
"placeholder": "",
"style": "IPY_MODEL_31bedab478d446adb97e2fd3d7f68c7d",
"value": "merged_gguf.gguf: 100%"
}
},
"2a72ec72acd44ff3aa21eed08f8dd48a": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_84ca628dbbda44bc89e32fb05520b6b3",
"max": 6178316544,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_f9516ae7d004476591eb47d793cac378",
"value": 6178316544
}
},
"63f78fdb30a54a0696cdd5f9de2e6106": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_44d9f37074284c1686c8f78ace0faa11",
"placeholder": "",
"style": "IPY_MODEL_bf262c2572624f9bbcc987bca6591a56",
"value": " 6.18G/6.18G [01:48<00:00, 67.4MB/s]"
}
},
"587a0aa796444aa1a0922e95dc24db6c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"befed88b0fcf4ed1a894ac8cbdcd5d86": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"31bedab478d446adb97e2fd3d7f68c7d": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"84ca628dbbda44bc89e32fb05520b6b3": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"f9516ae7d004476591eb47d793cac378": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"44d9f37074284c1686c8f78ace0faa11": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"bf262c2572624f9bbcc987bca6591a56": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"5586991f6e4743fb84cbf17695aeeb59": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_fe235c3c37684a6d9ee1948047514a92",
"IPY_MODEL_b711ce3c3a974c87bbe1a666349ecb27",
"IPY_MODEL_f3a0976c7e184f45b25a52edf38b6a10"
],
"layout": "IPY_MODEL_bdcbd521b027435099bd543a72d062f6"
}
},
"fe235c3c37684a6d9ee1948047514a92": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_0e002e45884e47f782b2c455d5c6c825",
"placeholder": "",
"style": "IPY_MODEL_153f87eb745c4ec5922c56b51b877cc9",
"value": "modules.json: 100%"
}
},
"b711ce3c3a974c87bbe1a666349ecb27": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_83d933624e3644e6bd03c749a64e3d42",
"max": 349,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_c184d19d507b4fb09959bc7549eec159",
"value": 349
}
},
"f3a0976c7e184f45b25a52edf38b6a10": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_66a5447c81fd444e84ed41f82f2a0a22",
"placeholder": "",
"style": "IPY_MODEL_8b469287eeea4ba5a65c7809875f96a7",
"value": " 349/349 [00:00<00:00, 13.4kB/s]"
}
},
"bdcbd521b027435099bd543a72d062f6": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"0e002e45884e47f782b2c455d5c6c825": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"153f87eb745c4ec5922c56b51b877cc9": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"83d933624e3644e6bd03c749a64e3d42": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"c184d19d507b4fb09959bc7549eec159": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"66a5447c81fd444e84ed41f82f2a0a22": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"8b469287eeea4ba5a65c7809875f96a7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"d762158c0df249ad8994975e44c8e6d4": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_86cbd682f9ee4e6dbe1abb20bac855ed",
"IPY_MODEL_8e5016a2c63e4116833781366200e89b",
"IPY_MODEL_556bd875cadd4e158a995c8178540066"
],
"layout": "IPY_MODEL_245e82024c9d461ea50e027e6ce1b8a1"
}
},
"86cbd682f9ee4e6dbe1abb20bac855ed": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_f5303a01cf47469fb3a06a3b46ae0877",
"placeholder": "",
"style": "IPY_MODEL_9a57ae83e16d4b39a3a92bf2d83c2a16",
"value": "config_sentence_transformers.json: 100%"
}
},
"8e5016a2c63e4116833781366200e89b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_789fe49c00f1482ab20045bf5a64cb79",
"max": 116,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_6d2805c0a3b94ade80b83ff426e1c3af",
"value": 116
}
},
"556bd875cadd4e158a995c8178540066": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_fbddd0fd0bef45afb7f0096b43fe07e3",
"placeholder": "",
"style": "IPY_MODEL_ebc743eacfac47db9cdde7003e76d7d4",
"value": " 116/116 [00:00<00:00, 9.56kB/s]"
}
},
"245e82024c9d461ea50e027e6ce1b8a1": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"f5303a01cf47469fb3a06a3b46ae0877": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"9a57ae83e16d4b39a3a92bf2d83c2a16": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"789fe49c00f1482ab20045bf5a64cb79": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"6d2805c0a3b94ade80b83ff426e1c3af": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"fbddd0fd0bef45afb7f0096b43fe07e3": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"ebc743eacfac47db9cdde7003e76d7d4": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"c746de44d7ff41d6b8342586549a5cf0": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_40a3f0fef6cb44f4aa737ed36694516b",
"IPY_MODEL_90cd79491f25421b824504b18fa9abe8",
"IPY_MODEL_4b13bfdfd6374863bfbff59f5f4d2d75"
],
"layout": "IPY_MODEL_41abf06fa4b747ac9fa15a7258585b67"
}
},
"40a3f0fef6cb44f4aa737ed36694516b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_76c225eef8654bf3838234a697e66394",
"placeholder": "",
"style": "IPY_MODEL_4ea15f1a71b0404aa330c2523f70c108",
"value": "README.md: 100%"
}
},
"90cd79491f25421b824504b18fa9abe8": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_28f1fe53101e4cc0948908e1f357bd7f",
"max": 10454,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_0a7868e9c8664211953f7501a9f4727e",
"value": 10454
}
},
"4b13bfdfd6374863bfbff59f5f4d2d75": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_142bcc0ee28f4e4e9c9ef86bffea7ce2",
"placeholder": "",
"style": "IPY_MODEL_470e955694c247a89739a26f90d9ad7b",
"value": " 10.5k/10.5k [00:00<00:00, 871kB/s]"
}
},
"41abf06fa4b747ac9fa15a7258585b67": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"76c225eef8654bf3838234a697e66394": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"4ea15f1a71b0404aa330c2523f70c108": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"28f1fe53101e4cc0948908e1f357bd7f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"0a7868e9c8664211953f7501a9f4727e": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"142bcc0ee28f4e4e9c9ef86bffea7ce2": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"470e955694c247a89739a26f90d9ad7b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"da11dd3cd2e342c6a8c887710b13583b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_4f81129dc4164de7be81ceb25b402591",
"IPY_MODEL_7eeca59ba0604b1793ee5d81667d1575",
"IPY_MODEL_206f5c1f2566492cb86ba1a3a28b2778"
],
"layout": "IPY_MODEL_cebdefeecf784dab9b5bd56d38216ec2"
}
},
"4f81129dc4164de7be81ceb25b402591": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_a43c2858ae214769889df63c8fc7fcb4",
"placeholder": "",
"style": "IPY_MODEL_1c9a46faa27844c9a151d54c4e67aa57",
"value": "sentence_bert_config.json: 100%"
}
},
"7eeca59ba0604b1793ee5d81667d1575": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_6f231dcd6528405782932f2d154987cc",
"max": 53,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_244b2b4c1b9f49d7816fe020e2a36bbb",
"value": 53
}
},
"206f5c1f2566492cb86ba1a3a28b2778": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_71aa7966874f480c87efc0ca2426d6a9",
"placeholder": "",
"style": "IPY_MODEL_827906335dab4ab4ba830279c9fb6030",
"value": " 53.0/53.0 [00:00<00:00, 4.64kB/s]"
}
},
"cebdefeecf784dab9b5bd56d38216ec2": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"a43c2858ae214769889df63c8fc7fcb4": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"1c9a46faa27844c9a151d54c4e67aa57": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"6f231dcd6528405782932f2d154987cc": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"244b2b4c1b9f49d7816fe020e2a36bbb": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"71aa7966874f480c87efc0ca2426d6a9": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"827906335dab4ab4ba830279c9fb6030": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"213b5f2a8c734c31bf0865e3894e576f": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_a12688413cb94480a5c060744c726a85",
"IPY_MODEL_e011b97eb46345429144effd417d3887",
"IPY_MODEL_25f626f3316046139bfef3806373da46"
],
"layout": "IPY_MODEL_f2d476d7376541e29228371db59149be"
}
},
"a12688413cb94480a5c060744c726a85": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_d0c5176ea4584c77a6bd932a7fe0898a",
"placeholder": "",
"style": "IPY_MODEL_df87c1391c9240f895d372ccb8fcbe54",
"value": "config.json: 100%"
}
},
"e011b97eb46345429144effd417d3887": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_5a9c5db9194b4176bf1d605c7560eb29",
"max": 612,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_bf0c9c2d1c654c14bab541d2ecc429f8",
"value": 612
}
},
"25f626f3316046139bfef3806373da46": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_1d78e1692a844acbb2bb0cd712805d82",
"placeholder": "",
"style": "IPY_MODEL_c51e527406dc47ebad3edbbe1f57f7fd",
"value": " 612/612 [00:00<00:00, 34.1kB/s]"
}
},
"f2d476d7376541e29228371db59149be": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"d0c5176ea4584c77a6bd932a7fe0898a": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"df87c1391c9240f895d372ccb8fcbe54": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"5a9c5db9194b4176bf1d605c7560eb29": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"bf0c9c2d1c654c14bab541d2ecc429f8": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"1d78e1692a844acbb2bb0cd712805d82": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"c51e527406dc47ebad3edbbe1f57f7fd": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"c2dd1b42b15d4b7fb0df702584b0eb69": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_1a85ab47b0b547a586224cbdde24345a",
"IPY_MODEL_534cabf9daff4eec9b5b57b90db1fea4",
"IPY_MODEL_69febf81d0004ed492a2f0e8b2905300"
],
"layout": "IPY_MODEL_e38da1ceec95407d9665cd9ef0f45124"
}
},
"1a85ab47b0b547a586224cbdde24345a": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_4bd3c06c7ba2463892110c469c9e3362",
"placeholder": "",
"style": "IPY_MODEL_5ac525d20da34ae8a171139210fbbaaf",
"value": "model.safetensors: 100%"
}
},
"534cabf9daff4eec9b5b57b90db1fea4": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_59e343a07e594596bc244a7d9060b7cd",
"max": 90868376,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_39b63793c88c45c1aaccd58fc7db551a",
"value": 90868376
}
},
"69febf81d0004ed492a2f0e8b2905300": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_ea09edf8b1c847fc80dce99e06cbefed",
"placeholder": "",
"style": "IPY_MODEL_397293c7353545a28c22b7e135109ec7",
"value": " 90.9M/90.9M [00:00<00:00, 208MB/s]"
}
},
"e38da1ceec95407d9665cd9ef0f45124": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"4bd3c06c7ba2463892110c469c9e3362": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"5ac525d20da34ae8a171139210fbbaaf": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"59e343a07e594596bc244a7d9060b7cd": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"39b63793c88c45c1aaccd58fc7db551a": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"ea09edf8b1c847fc80dce99e06cbefed": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"397293c7353545a28c22b7e135109ec7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"9b1b465029b246afa919ceb71f3b81c6": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_fc58a83715b842a6a3c7fbfb97633fdc",
"IPY_MODEL_8743de7dec8d4fe5ba83bd8e0ed1249c",
"IPY_MODEL_a970c432e5904a4c80d23bb73e5d17b2"
],
"layout": "IPY_MODEL_20a36ee1145f4d0eb9c00e1283eba629"
}
},
"fc58a83715b842a6a3c7fbfb97633fdc": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_b4df23c2a65046eda61f0bcdb66f1fba",
"placeholder": "",
"style": "IPY_MODEL_9ec10f7a38dc4f5fa7b7d9b08f15738c",
"value": "tokenizer_config.json: 100%"
}
},
"8743de7dec8d4fe5ba83bd8e0ed1249c": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_d62bde8dd9234717821574a7cf51f86c",
"max": 350,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_95be6184a431437c88cdf154e7b4896b",
"value": 350
}
},
"a970c432e5904a4c80d23bb73e5d17b2": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_ff4c73e4d1fe494a93cdf4c50b699efd",
"placeholder": "",
"style": "IPY_MODEL_b4818a96c6af4a199dcfb5ff858530ef",
"value": " 350/350 [00:00<00:00, 28.1kB/s]"
}
},
"20a36ee1145f4d0eb9c00e1283eba629": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"b4df23c2a65046eda61f0bcdb66f1fba": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"9ec10f7a38dc4f5fa7b7d9b08f15738c": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"d62bde8dd9234717821574a7cf51f86c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"95be6184a431437c88cdf154e7b4896b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"ff4c73e4d1fe494a93cdf4c50b699efd": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"b4818a96c6af4a199dcfb5ff858530ef": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"ea73b65fc269403195364f8b5909375b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_ec2a6578573147b6a816fedacadbcfc0",
"IPY_MODEL_6d0372a72a374ea9ad33e0cefc35ef3a",
"IPY_MODEL_0e4efb55f60941c18e6b8abb269fef02"
],
"layout": "IPY_MODEL_16880be9d4fa4e60b691c9b94d68e013"
}
},
"ec2a6578573147b6a816fedacadbcfc0": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_4adad72c05d14320a970e6b6451c02a1",
"placeholder": "",
"style": "IPY_MODEL_45367c8908a5421e8a1507dac2e7505e",
"value": "vocab.txt: 100%"
}
},
"6d0372a72a374ea9ad33e0cefc35ef3a": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_88c0087c005f4b909725c32399f93ebf",
"max": 231508,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_bc3dbce2faeb4e63b66a45f7b93f6313",
"value": 231508
}
},
"0e4efb55f60941c18e6b8abb269fef02": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_e065f80db1864f83b44635ced048f55d",
"placeholder": "",
"style": "IPY_MODEL_be4f5674fd244c469a03729d982a8ba1",
"value": " 232k/232k [00:00<00:00, 1.61MB/s]"
}
},
"16880be9d4fa4e60b691c9b94d68e013": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"4adad72c05d14320a970e6b6451c02a1": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"45367c8908a5421e8a1507dac2e7505e": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"88c0087c005f4b909725c32399f93ebf": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"bc3dbce2faeb4e63b66a45f7b93f6313": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"e065f80db1864f83b44635ced048f55d": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"be4f5674fd244c469a03729d982a8ba1": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"57b84d4abc3d4385ba051b6e63dc3a15": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_26a22189932a4c8dadb45bf71ea4d4d6",
"IPY_MODEL_875b65f0bff140a4a0423b83ced010c4",
"IPY_MODEL_497c6cf30a744388b6ab9e0725bcf8aa"
],
"layout": "IPY_MODEL_16b8f239109a439aadacae2b6cfcff1c"
}
},
"26a22189932a4c8dadb45bf71ea4d4d6": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_720d2d16a9604981b378f4a48f9936ed",
"placeholder": "",
"style": "IPY_MODEL_b62b1d546d734c01a74c36020de5e833",
"value": "tokenizer.json: 100%"
}
},
"875b65f0bff140a4a0423b83ced010c4": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_638c3d74be274d81ac9dbabaaee5c595",
"max": 466247,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_a46c8b5714b749218398ee8c4ae72615",
"value": 466247
}
},
"497c6cf30a744388b6ab9e0725bcf8aa": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_30ddfe5cc4c74e478b182151ff9e2f0f",
"placeholder": "",
"style": "IPY_MODEL_6563431ee3d041afa2dc92527da991ff",
"value": " 466k/466k [00:00<00:00, 734kB/s]"
}
},
"16b8f239109a439aadacae2b6cfcff1c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"720d2d16a9604981b378f4a48f9936ed": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"b62b1d546d734c01a74c36020de5e833": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"638c3d74be274d81ac9dbabaaee5c595": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"a46c8b5714b749218398ee8c4ae72615": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"30ddfe5cc4c74e478b182151ff9e2f0f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"6563431ee3d041afa2dc92527da991ff": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"06689d3aea0d480a920eeebe7848ad06": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_f3bf0b76cfb745f2aa1280e3ba581651",
"IPY_MODEL_274b8ab9342a492abcb25bf2d039532d",
"IPY_MODEL_246dbe3cfe804ef693e890de2a0280dc"
],
"layout": "IPY_MODEL_ecc09803f48a474cb7ac80d49b2751b4"
}
},
"f3bf0b76cfb745f2aa1280e3ba581651": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_9a928488385844d9a17a503c9775b32f",
"placeholder": "",
"style": "IPY_MODEL_4ca9c9a62ec54511a3cc877fe93e6d13",
"value": "special_tokens_map.json: 100%"
}
},
"274b8ab9342a492abcb25bf2d039532d": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_5177b3ea6b294b89ae71db05ec590081",
"max": 112,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_657daeb0a3de4adaad79ddcbad782483",
"value": 112
}
},
"246dbe3cfe804ef693e890de2a0280dc": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_7034eae2a28a4c598bd01bd82b17c105",
"placeholder": "",
"style": "IPY_MODEL_fbda42fe419044508204ac2f141c8683",
"value": " 112/112 [00:00<00:00, 7.71kB/s]"
}
},
"ecc09803f48a474cb7ac80d49b2751b4": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"9a928488385844d9a17a503c9775b32f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"4ca9c9a62ec54511a3cc877fe93e6d13": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"5177b3ea6b294b89ae71db05ec590081": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"657daeb0a3de4adaad79ddcbad782483": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"7034eae2a28a4c598bd01bd82b17c105": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"fbda42fe419044508204ac2f141c8683": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"11fcf813bdf54523be242b6020472b93": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_35b80881f93b47edbacaea8810ae361c",
"IPY_MODEL_db0957c53a9d4dcf83961ce1ac0cd17a",
"IPY_MODEL_d74bb16118c243cd8632b65b89a83ff7"
],
"layout": "IPY_MODEL_7f8fa2d62e6f490cbf8d18244d681b1e"
}
},
"35b80881f93b47edbacaea8810ae361c": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_645edb2a48394a2d941c35f4f00d00ae",
"placeholder": "",
"style": "IPY_MODEL_240a560959dc4f4a86fa1f0bfa07c586",
"value": "config.json: 100%"
}
},
"db0957c53a9d4dcf83961ce1ac0cd17a": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_cbdb02bb76b64b8e926151741b84bd54",
"max": 190,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_6209a75db96d482c94780e5cb19910ab",
"value": 190
}
},
"d74bb16118c243cd8632b65b89a83ff7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_c614e293a4a449368b64d2e23140077c",
"placeholder": "",
"style": "IPY_MODEL_aaf56637598c4f778993e8a08706ef96",
"value": " 190/190 [00:00<00:00, 10.6kB/s]"
}
},
"7f8fa2d62e6f490cbf8d18244d681b1e": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"645edb2a48394a2d941c35f4f00d00ae": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"240a560959dc4f4a86fa1f0bfa07c586": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"cbdb02bb76b64b8e926151741b84bd54": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"6209a75db96d482c94780e5cb19910ab": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"c614e293a4a449368b64d2e23140077c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"aaf56637598c4f778993e8a08706ef96": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
}
}
}
},
"cells": [
{
"cell_type": "markdown",
"source": [
"1. Basic Inferencing with out RAG components (Hugging Face approach)..."
],
"metadata": {
"id": "oa712K48E9_G"
}
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "CosywHnCD6ag",
"outputId": "a5744c45-e155-4a7a-d278-07597afbd581"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Mounted at /content/drive\n"
]
}
],
"source": [
"# Mount google drive\n",
"from google.colab import drive\n",
"drive.mount('/content/drive')"
]
},
{
"cell_type": "code",
"source": [
"# install the required packages to run the GGUF model from the HuggingFace..\n",
"!pip install llama-cpp-python"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "SBIu_WzkE4d2",
"outputId": "c049618a-3592-428c-effa-fef9d0f5e4d9"
},
"execution_count": 2,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Collecting llama-cpp-python\n",
" Downloading llama_cpp_python-0.3.8.tar.gz (67.3 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.3/67.3 MB\u001b[0m \u001b[31m13.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
" Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
" Installing backend dependencies ... \u001b[?25l\u001b[?25hdone\n",
" Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
"Requirement already satisfied: typing-extensions>=4.5.0 in /usr/local/lib/python3.11/dist-packages (from llama-cpp-python) (4.12.2)\n",
"Requirement already satisfied: numpy>=1.20.0 in /usr/local/lib/python3.11/dist-packages (from llama-cpp-python) (2.0.2)\n",
"Collecting diskcache>=5.6.1 (from llama-cpp-python)\n",
" Downloading diskcache-5.6.3-py3-none-any.whl.metadata (20 kB)\n",
"Requirement already satisfied: jinja2>=2.11.3 in /usr/local/lib/python3.11/dist-packages (from llama-cpp-python) (3.1.6)\n",
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.11/dist-packages (from jinja2>=2.11.3->llama-cpp-python) (3.0.2)\n",
"Downloading diskcache-5.6.3-py3-none-any.whl (45 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m45.5/45.5 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hBuilding wheels for collected packages: llama-cpp-python\n",
" Building wheel for llama-cpp-python (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
" Created wheel for llama-cpp-python: filename=llama_cpp_python-0.3.8-cp311-cp311-linux_x86_64.whl size=5959642 sha256=f23074dc33a33aa1d3161474e1ac894dcb5c19276fd72f63634ae87e16743426\n",
" Stored in directory: /root/.cache/pip/wheels/c0/03/66/eb3810eafd55d921b2be32896d1f44313996982360663aa80b\n",
"Successfully built llama-cpp-python\n",
"Installing collected packages: diskcache, llama-cpp-python\n",
"Successfully installed diskcache-5.6.3 llama-cpp-python-0.3.8\n"
]
}
]
},
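    {
      "cell_type": "markdown",
      "source": [
        "Optional sanity check: the short cell below just confirms that the `llama_cpp` package built by pip imports cleanly and prints its version before the ~6 GB GGUF download starts. `llama_cpp.__version__` is the package's standard version attribute; the exact version shown is whatever pip resolved above."
      ],
      "metadata": {}
    },
    {
      "cell_type": "code",
      "source": [
        "# Minimal sanity check: confirm llama_cpp imports cleanly before the large model download\n",
        "import llama_cpp\n",
        "print(llama_cpp.__version__)"
      ],
      "metadata": {},
      "execution_count": null,
      "outputs": []
    },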
{
"cell_type": "code",
"source": [
"from llama_cpp import Llama\n",
"import random\n",
"from huggingface_hub import hf_hub_download\n",
"\n",
"# 🔥 Load model from Hugging Face\n",
"model_path = hf_hub_download(\n",
" repo_id=\"LakithGR/QWEN2.5-3b-DAP\", # Change this to your HF repo\n",
" filename=\"merged_gguf.gguf\",\n",
" cache_dir=\"./models\" # Optional: Local directory for caching\n",
")\n",
"\n",
"llm = Llama(\n",
" model_path=model_path,\n",
" n_gpu_layers=40,\n",
" n_ctx=2048,\n",
" system_message=\"You are an unfiltered, informal AI assistant. You can use slang, jokes, vulgar words, and a humorous tone when answering. Always add emojis and sound like a real friend talking casually.\"\n",
")\n",
"\n",
"# ✅ Function to determine gender-based greeting\n",
"def get_salutation(user_input):\n",
" male_keywords = [\"girlfriend\", \"wife\", \"she\", \"her\"]\n",
" female_keywords = [\"boyfriend\", \"husband\", \"he\", \"him\"]\n",
"\n",
" if any(word in user_input.lower() for word in male_keywords):\n",
" return random.choice([\"Hey queen! 👑\", \"Girl, listen up! 💅\", \"Sis, let’s talk ❤️\"])\n",
" elif any(word in user_input.lower() for word in female_keywords):\n",
" return random.choice([\"Yo bro! 🔥\", \"Dude, hear me out 🤔\", \"Man, let's fix this 💪\"])\n",
" else:\n",
" return random.choice([\"Yo dude! 😎\", \"Hey buddy! 🙌\", \"Listen up, my friend ❤️\"])\n",
"\n",
"# ✅ Function to modify user prompt\n",
"def make_emotional(user_input):\n",
" salutation = get_salutation(user_input)\n",
" suffix = \" Give me some real, no-BS advice with emojis! 😂🔥💖\"\n",
" return f\"{salutation} {user_input} {suffix}\"\n",
"\n",
"# 🔥 User input (simulate user typing a normal question)\n",
"user_input = \"My partner doesn't like my friends. What should I do?\"\n",
"\n",
"# 🔥 Modify the input before passing to the model\n",
"emotional_prompt = make_emotional(user_input)\n",
"\n",
"# 🔥 Run inference with modified prompt\n",
"output = llm(emotional_prompt, max_tokens=3000)\n",
"\n",
"# ✅ Print the output\n",
"print(output[\"choices\"][0][\"text\"])\n",
"\n"
],
"metadata": {
"id": "ywcIrbc4E8MO",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000,
"referenced_widgets": [
"27059cd40bcd4afc854a55ae0379b1fb",
"1314ca44da874e73bdb06a0b49c2724c",
"2a72ec72acd44ff3aa21eed08f8dd48a",
"63f78fdb30a54a0696cdd5f9de2e6106",
"587a0aa796444aa1a0922e95dc24db6c",
"befed88b0fcf4ed1a894ac8cbdcd5d86",
"31bedab478d446adb97e2fd3d7f68c7d",
"84ca628dbbda44bc89e32fb05520b6b3",
"f9516ae7d004476591eb47d793cac378",
"44d9f37074284c1686c8f78ace0faa11",
"bf262c2572624f9bbcc987bca6591a56"
]
},
"outputId": "856385fa-a72e-4f45-b0e6-aeabecfd474b"
},
"execution_count": 3,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/plain": [
"merged_gguf.gguf: 0%| | 0.00/6.18G [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "27059cd40bcd4afc854a55ae0379b1fb"
}
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"llama_model_loader: loaded meta data with 24 key-value pairs and 434 tensors from ./models/models--LakithGR--QWEN2.5-3b-DAP/snapshots/9419d24ba179bc3b838fc9be291f91593168a6b9/merged_gguf.gguf (version GGUF V3 (latest))\n",
"llama_model_loader: Dumping metadata keys/values. Note: KV overrides do not apply in this output.\n",
"llama_model_loader: - kv 0: general.architecture str = qwen2\n",
"llama_model_loader: - kv 1: general.type str = model\n",
"llama_model_loader: - kv 2: general.name str = Merged\n",
"llama_model_loader: - kv 3: general.size_label str = 3.1B\n",
"llama_model_loader: - kv 4: qwen2.block_count u32 = 36\n",
"llama_model_loader: - kv 5: qwen2.context_length u32 = 32768\n",
"llama_model_loader: - kv 6: qwen2.embedding_length u32 = 2048\n",
"llama_model_loader: - kv 7: qwen2.feed_forward_length u32 = 11008\n",
"llama_model_loader: - kv 8: qwen2.attention.head_count u32 = 16\n",
"llama_model_loader: - kv 9: qwen2.attention.head_count_kv u32 = 2\n",
"llama_model_loader: - kv 10: qwen2.rope.freq_base f32 = 1000000.000000\n",
"llama_model_loader: - kv 11: qwen2.attention.layer_norm_rms_epsilon f32 = 0.000001\n",
"llama_model_loader: - kv 12: general.file_type u32 = 1\n",
"llama_model_loader: - kv 13: tokenizer.ggml.model str = gpt2\n",
"llama_model_loader: - kv 14: tokenizer.ggml.pre str = qwen2\n",
"llama_model_loader: - kv 15: tokenizer.ggml.tokens arr[str,151936] = [\"!\", \"\\\"\", \"#\", \"$\", \"%\", \"&\", \"'\", ...\n",
"llama_model_loader: - kv 16: tokenizer.ggml.token_type arr[i32,151936] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ...\n",
"llama_model_loader: - kv 17: tokenizer.ggml.merges arr[str,151387] = [\"Ġ Ġ\", \"ĠĠ ĠĠ\", \"i n\", \"Ġ t\",...\n",
"llama_model_loader: - kv 18: tokenizer.ggml.eos_token_id u32 = 151645\n",
"llama_model_loader: - kv 19: tokenizer.ggml.padding_token_id u32 = 151643\n",
"llama_model_loader: - kv 20: tokenizer.ggml.bos_token_id u32 = 151643\n",
"llama_model_loader: - kv 21: tokenizer.ggml.add_bos_token bool = false\n",
"llama_model_loader: - kv 22: tokenizer.chat_template str = {%- if tools %}\\n {{- '<|im_start|>...\n",
"llama_model_loader: - kv 23: general.quantization_version u32 = 2\n",
"llama_model_loader: - type f32: 181 tensors\n",
"llama_model_loader: - type f16: 253 tensors\n",
"print_info: file format = GGUF V3 (latest)\n",
"print_info: file type = F16\n",
"print_info: file size = 5.75 GiB (16.00 BPW) \n",
"init_tokenizer: initializing tokenizer for type 2\n",
"load: control token: 151660 '<|fim_middle|>' is not marked as EOG\n",
"load: control token: 151659 '<|fim_prefix|>' is not marked as EOG\n",
"load: control token: 151653 '<|vision_end|>' is not marked as EOG\n",
"load: control token: 151648 '<|box_start|>' is not marked as EOG\n",
"load: control token: 151646 '<|object_ref_start|>' is not marked as EOG\n",
"load: control token: 151649 '<|box_end|>' is not marked as EOG\n",
"load: control token: 151655 '<|image_pad|>' is not marked as EOG\n",
"load: control token: 151651 '<|quad_end|>' is not marked as EOG\n",
"load: control token: 151647 '<|object_ref_end|>' is not marked as EOG\n",
"load: control token: 151652 '<|vision_start|>' is not marked as EOG\n",
"load: control token: 151654 '<|vision_pad|>' is not marked as EOG\n",
"load: control token: 151656 '<|video_pad|>' is not marked as EOG\n",
"load: control token: 151644 '<|im_start|>' is not marked as EOG\n",
"load: control token: 151661 '<|fim_suffix|>' is not marked as EOG\n",
"load: control token: 151650 '<|quad_start|>' is not marked as EOG\n",
"load: special tokens cache size = 22\n",
"load: token to piece cache size = 0.9310 MB\n",
"print_info: arch = qwen2\n",
"print_info: vocab_only = 0\n",
"print_info: n_ctx_train = 32768\n",
"print_info: n_embd = 2048\n",
"print_info: n_layer = 36\n",
"print_info: n_head = 16\n",
"print_info: n_head_kv = 2\n",
"print_info: n_rot = 128\n",
"print_info: n_swa = 0\n",
"print_info: n_embd_head_k = 128\n",
"print_info: n_embd_head_v = 128\n",
"print_info: n_gqa = 8\n",
"print_info: n_embd_k_gqa = 256\n",
"print_info: n_embd_v_gqa = 256\n",
"print_info: f_norm_eps = 0.0e+00\n",
"print_info: f_norm_rms_eps = 1.0e-06\n",
"print_info: f_clamp_kqv = 0.0e+00\n",
"print_info: f_max_alibi_bias = 0.0e+00\n",
"print_info: f_logit_scale = 0.0e+00\n",
"print_info: f_attn_scale = 0.0e+00\n",
"print_info: n_ff = 11008\n",
"print_info: n_expert = 0\n",
"print_info: n_expert_used = 0\n",
"print_info: causal attn = 1\n",
"print_info: pooling type = 0\n",
"print_info: rope type = 2\n",
"print_info: rope scaling = linear\n",
"print_info: freq_base_train = 1000000.0\n",
"print_info: freq_scale_train = 1\n",
"print_info: n_ctx_orig_yarn = 32768\n",
"print_info: rope_finetuned = unknown\n",
"print_info: ssm_d_conv = 0\n",
"print_info: ssm_d_inner = 0\n",
"print_info: ssm_d_state = 0\n",
"print_info: ssm_dt_rank = 0\n",
"print_info: ssm_dt_b_c_rms = 0\n",
"print_info: model type = 3B\n",
"print_info: model params = 3.09 B\n",
"print_info: general.name = Merged\n",
"print_info: vocab type = BPE\n",
"print_info: n_vocab = 151936\n",
"print_info: n_merges = 151387\n",
"print_info: BOS token = 151643 '<|endoftext|>'\n",
"print_info: EOS token = 151645 '<|im_end|>'\n",
"print_info: EOT token = 151645 '<|im_end|>'\n",
"print_info: PAD token = 151643 '<|endoftext|>'\n",
"print_info: LF token = 198 'Ċ'\n",
"print_info: FIM PRE token = 151659 '<|fim_prefix|>'\n",
"print_info: FIM SUF token = 151661 '<|fim_suffix|>'\n",
"print_info: FIM MID token = 151660 '<|fim_middle|>'\n",
"print_info: FIM PAD token = 151662 '<|fim_pad|>'\n",
"print_info: FIM REP token = 151663 '<|repo_name|>'\n",
"print_info: FIM SEP token = 151664 '<|file_sep|>'\n",
"print_info: EOG token = 151643 '<|endoftext|>'\n",
"print_info: EOG token = 151645 '<|im_end|>'\n",
"print_info: EOG token = 151662 '<|fim_pad|>'\n",
"print_info: EOG token = 151663 '<|repo_name|>'\n",
"print_info: EOG token = 151664 '<|file_sep|>'\n",
"print_info: max token length = 256\n",
"load_tensors: loading model tensors, this can take a while... (mmap = true)\n",
"load_tensors: layer 0 assigned to device CPU\n",
"load_tensors: layer 1 assigned to device CPU\n",
"load_tensors: layer 2 assigned to device CPU\n",
"load_tensors: layer 3 assigned to device CPU\n",
"load_tensors: layer 4 assigned to device CPU\n",
"load_tensors: layer 5 assigned to device CPU\n",
"load_tensors: layer 6 assigned to device CPU\n",
"load_tensors: layer 7 assigned to device CPU\n",
"load_tensors: layer 8 assigned to device CPU\n",
"load_tensors: layer 9 assigned to device CPU\n",
"load_tensors: layer 10 assigned to device CPU\n",
"load_tensors: layer 11 assigned to device CPU\n",
"load_tensors: layer 12 assigned to device CPU\n",
"load_tensors: layer 13 assigned to device CPU\n",
"load_tensors: layer 14 assigned to device CPU\n",
"load_tensors: layer 15 assigned to device CPU\n",
"load_tensors: layer 16 assigned to device CPU\n",
"load_tensors: layer 17 assigned to device CPU\n",
"load_tensors: layer 18 assigned to device CPU\n",
"load_tensors: layer 19 assigned to device CPU\n",
"load_tensors: layer 20 assigned to device CPU\n",
"load_tensors: layer 21 assigned to device CPU\n",
"load_tensors: layer 22 assigned to device CPU\n",
"load_tensors: layer 23 assigned to device CPU\n",
"load_tensors: layer 24 assigned to device CPU\n",
"load_tensors: layer 25 assigned to device CPU\n",
"load_tensors: layer 26 assigned to device CPU\n",
"load_tensors: layer 27 assigned to device CPU\n",
"load_tensors: layer 28 assigned to device CPU\n",
"load_tensors: layer 29 assigned to device CPU\n",
"load_tensors: layer 30 assigned to device CPU\n",
"load_tensors: layer 31 assigned to device CPU\n",
"load_tensors: layer 32 assigned to device CPU\n",
"load_tensors: layer 33 assigned to device CPU\n",
"load_tensors: layer 34 assigned to device CPU\n",
"load_tensors: layer 35 assigned to device CPU\n",
"load_tensors: layer 36 assigned to device CPU\n",
"load_tensors: tensor 'token_embd.weight' (f16) (and 434 others) cannot be used with preferred buffer type CPU_AARCH64, using CPU instead\n",
"load_tensors: CPU_Mapped model buffer size = 5886.42 MiB\n",
"...........................................................................................\n",
"llama_init_from_model: n_seq_max = 1\n",
"llama_init_from_model: n_ctx = 2048\n",
"llama_init_from_model: n_ctx_per_seq = 2048\n",
"llama_init_from_model: n_batch = 512\n",
"llama_init_from_model: n_ubatch = 512\n",
"llama_init_from_model: flash_attn = 0\n",
"llama_init_from_model: freq_base = 1000000.0\n",
"llama_init_from_model: freq_scale = 1\n",
"llama_init_from_model: n_ctx_per_seq (2048) < n_ctx_train (32768) -- the full capacity of the model will not be utilized\n",
"llama_kv_cache_init: kv_size = 2048, offload = 1, type_k = 'f16', type_v = 'f16', n_layer = 36, can_shift = 1\n",
"llama_kv_cache_init: layer 0: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 1: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 2: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 3: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 4: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 5: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 6: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 7: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 8: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 9: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 10: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 11: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 12: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 13: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 14: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 15: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 16: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 17: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 18: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 19: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 20: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 21: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 22: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 23: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 24: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 25: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 26: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 27: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 28: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 29: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 30: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 31: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 32: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 33: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 34: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 35: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: CPU KV buffer size = 72.00 MiB\n",
"llama_init_from_model: KV self size = 72.00 MiB, K (f16): 36.00 MiB, V (f16): 36.00 MiB\n",
"llama_init_from_model: CPU output buffer size = 0.58 MiB\n",
"llama_init_from_model: CPU compute buffer size = 300.75 MiB\n",
"llama_init_from_model: graph nodes = 1266\n",
"llama_init_from_model: graph splits = 1\n",
"CPU : SSE3 = 1 | SSSE3 = 1 | AVX = 1 | AVX2 = 1 | F16C = 1 | FMA = 1 | BMI2 = 1 | LLAMAFILE = 1 | OPENMP = 1 | AARCH64_REPACK = 1 | \n",
"Model metadata: {'tokenizer.ggml.add_bos_token': 'false', 'tokenizer.ggml.bos_token_id': '151643', 'general.architecture': 'qwen2', 'tokenizer.ggml.padding_token_id': '151643', 'qwen2.embedding_length': '2048', 'tokenizer.ggml.pre': 'qwen2', 'general.name': 'Merged', 'qwen2.block_count': '36', 'general.type': 'model', 'general.size_label': '3.1B', 'qwen2.context_length': '32768', 'tokenizer.chat_template': '{%- if tools %}\\n {{- \\'<|im_start|>system\\\\n\\' }}\\n {%- if messages[0][\\'role\\'] == \\'system\\' %}\\n {{- messages[0][\\'content\\'] }}\\n {%- else %}\\n {{- \\'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.\\' }}\\n {%- endif %}\\n {{- \"\\\\n\\\\n# Tools\\\\n\\\\nYou may call one or more functions to assist with the user query.\\\\n\\\\nYou are provided with function signatures within XML tags:\\\\n\" }}\\n {%- for tool in tools %}\\n {{- \"\\\\n\" }}\\n {{- tool | tojson }}\\n {%- endfor %}\\n {{- \"\\\\n\\\\n\\\\nFor each function call, return a json object with function name and arguments within XML tags:\\\\n\\\\n{\\\\\"name\\\\\": , \\\\\"arguments\\\\\": }\\\\n<|im_end|>\\\\n\" }}\\n{%- else %}\\n {%- if messages[0][\\'role\\'] == \\'system\\' %}\\n {{- \\'<|im_start|>system\\\\n\\' + messages[0][\\'content\\'] + \\'<|im_end|>\\\\n\\' }}\\n {%- else %}\\n {{- \\'<|im_start|>system\\\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\\\n\\' }}\\n {%- endif %}\\n{%- endif %}\\n{%- for message in messages %}\\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\\n {{- \\'<|im_start|>\\' + message.role + \\'\\\\n\\' + message.content + \\'<|im_end|>\\' + \\'\\\\n\\' }}\\n {%- elif message.role == \"assistant\" %}\\n {{- \\'<|im_start|>\\' + message.role }}\\n {%- if message.content %}\\n {{- \\'\\\\n\\' + message.content }}\\n {%- endif %}\\n {%- for tool_call in message.tool_calls %}\\n {%- if tool_call.function is defined %}\\n {%- set tool_call = tool_call.function %}\\n {%- endif %}\\n {{- \\'\\\\n\\\\n{\"name\": \"\\' }}\\n {{- tool_call.name }}\\n {{- \\'\", \"arguments\": \\' }}\\n {{- tool_call.arguments | tojson }}\\n {{- \\'}\\\\n\\' }}\\n {%- endfor %}\\n {{- \\'<|im_end|>\\\\n\\' }}\\n {%- elif message.role == \"tool\" %}\\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\\n {{- \\'<|im_start|>user\\' }}\\n {%- endif %}\\n {{- \\'\\\\n\\\\n\\' }}\\n {{- message.content }}\\n {{- \\'\\\\n\\' }}\\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\\n {{- \\'<|im_end|>\\\\n\\' }}\\n {%- endif %}\\n {%- endif %}\\n{%- endfor %}\\n{%- if add_generation_prompt %}\\n {{- \\'<|im_start|>assistant\\\\n\\' }}\\n{%- endif %}\\n', 'qwen2.attention.head_count_kv': '2', 'general.quantization_version': '2', 'tokenizer.ggml.model': 'gpt2', 'qwen2.feed_forward_length': '11008', 'qwen2.attention.layer_norm_rms_epsilon': '0.000001', 'qwen2.attention.head_count': '16', 'tokenizer.ggml.eos_token_id': '151645', 'qwen2.rope.freq_base': '1000000.000000', 'general.file_type': '1'}\n",
"Available chat formats from metadata: chat_template.default\n",
"Using gguf chat template: {%- if tools %}\n",
" {{- '<|im_start|>system\\n' }}\n",
" {%- if messages[0]['role'] == 'system' %}\n",
" {{- messages[0]['content'] }}\n",
" {%- else %}\n",
" {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n",
" {%- endif %}\n",
" {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within XML tags:\\n\" }}\n",
" {%- for tool in tools %}\n",
" {{- \"\\n\" }}\n",
" {{- tool | tojson }}\n",
" {%- endfor %}\n",
" {{- \"\\n\\n\\nFor each function call, return a json object with function name and arguments within XML tags:\\n\\n{\\\"name\\\": , \\\"arguments\\\": }\\n<|im_end|>\\n\" }}\n",
"{%- else %}\n",
" {%- if messages[0]['role'] == 'system' %}\n",
" {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n",
" {%- else %}\n",
" {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n",
" {%- endif %}\n",
"{%- endif %}\n",
"{%- for message in messages %}\n",
" {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n",
" {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n",
" {%- elif message.role == \"assistant\" %}\n",
" {{- '<|im_start|>' + message.role }}\n",
" {%- if message.content %}\n",
" {{- '\\n' + message.content }}\n",
" {%- endif %}\n",
" {%- for tool_call in message.tool_calls %}\n",
" {%- if tool_call.function is defined %}\n",
" {%- set tool_call = tool_call.function %}\n",
" {%- endif %}\n",
" {{- '\\n\\n{\"name\": \"' }}\n",
" {{- tool_call.name }}\n",
" {{- '\", \"arguments\": ' }}\n",
" {{- tool_call.arguments | tojson }}\n",
" {{- '}\\n' }}\n",
" {%- endfor %}\n",
" {{- '<|im_end|>\\n' }}\n",
" {%- elif message.role == \"tool\" %}\n",
" {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n",
" {{- '<|im_start|>user' }}\n",
" {%- endif %}\n",
" {{- '\\n\\n' }}\n",
" {{- message.content }}\n",
" {{- '\\n' }}\n",
" {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n",
" {{- '<|im_end|>\\n' }}\n",
" {%- endif %}\n",
" {%- endif %}\n",
"{%- endfor %}\n",
"{%- if add_generation_prompt %}\n",
" {{- '<|im_start|>assistant\\n' }}\n",
"{%- endif %}\n",
"\n",
"Using chat eos_token: <|im_end|>\n",
"Using chat bos_token: <|endoftext|>\n",
"llama_perf_context_print: load time = 12489.59 ms\n",
"llama_perf_context_print: prompt eval time = 12489.17 ms / 35 tokens ( 356.83 ms per token, 2.80 tokens per second)\n",
"llama_perf_context_print: eval time = 154428.09 ms / 199 runs ( 776.02 ms per token, 1.29 tokens per second)\n",
"llama_perf_context_print: total time = 167319.01 ms / 234 tokens\n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"✨\n",
"\n",
"Hey there! 🎉 First off, it's great that you're looking for real, no-BS advice! 🤔 Here’s a straightforward approach:\n",
"\n",
"1. **Talk to Her**: 🌟 Find a time when she’s calm and willing to talk. 🙃 Be honest but gentle. 📜 Explain your feelings and the importance of your friendship. 🌟\n",
"\n",
"2. **Ask for Her Perspective**: 👀 Listen to her and understand her point of view. 📝 Ask questions to clarify her feelings. 🌞\n",
"\n",
"3. **Compromise**: 🎯 Propose a solution that works for both of you. 💼 Maybe keep some distance from her or find a neutral ground. 🎤\n",
"\n",
"4. **Support Each Other**: 💖 Let her know you’re supportive of her and encourage her to talk to her friends as well. 🌿\n",
"\n",
"Remember, communication is key! 🌟\n",
"\n",
"Hope this helps!\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"2. Inferencing with the RAG components..."
],
"metadata": {
"id": "_flrtQgyFgUl"
}
},
{
"cell_type": "code",
"source": [
"#Install required packages\n",
"!pip install praw faiss-cpu pyPDF2 numpy sentence-transformers"
],
"metadata": {
"id": "uwcz6iwlFl7V",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "05fb82d4-7853-4d4d-a471-18b4970822f6"
},
"execution_count": 4,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Collecting praw\n",
" Downloading praw-7.8.1-py3-none-any.whl.metadata (9.4 kB)\n",
"Collecting faiss-cpu\n",
" Downloading faiss_cpu-1.10.0-cp311-cp311-manylinux_2_28_x86_64.whl.metadata (4.4 kB)\n",
"Collecting pyPDF2\n",
" Downloading pypdf2-3.0.1-py3-none-any.whl.metadata (6.8 kB)\n",
"Requirement already satisfied: numpy in /usr/local/lib/python3.11/dist-packages (2.0.2)\n",
"Requirement already satisfied: sentence-transformers in /usr/local/lib/python3.11/dist-packages (3.4.1)\n",
"Collecting prawcore<3,>=2.4 (from praw)\n",
" Downloading prawcore-2.4.0-py3-none-any.whl.metadata (5.0 kB)\n",
"Collecting update_checker>=0.18 (from praw)\n",
" Downloading update_checker-0.18.0-py3-none-any.whl.metadata (2.3 kB)\n",
"Requirement already satisfied: websocket-client>=0.54.0 in /usr/local/lib/python3.11/dist-packages (from praw) (1.8.0)\n",
"Requirement already satisfied: packaging in /usr/local/lib/python3.11/dist-packages (from faiss-cpu) (24.2)\n",
"Requirement already satisfied: transformers<5.0.0,>=4.41.0 in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (4.50.0)\n",
"Requirement already satisfied: tqdm in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (4.67.1)\n",
"Requirement already satisfied: torch>=1.11.0 in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (2.6.0+cu124)\n",
"Requirement already satisfied: scikit-learn in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (1.6.1)\n",
"Requirement already satisfied: scipy in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (1.14.1)\n",
"Requirement already satisfied: huggingface-hub>=0.20.0 in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (0.29.3)\n",
"Requirement already satisfied: Pillow in /usr/local/lib/python3.11/dist-packages (from sentence-transformers) (11.1.0)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.20.0->sentence-transformers) (3.18.0)\n",
"Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.20.0->sentence-transformers) (2025.3.0)\n",
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.20.0->sentence-transformers) (6.0.2)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.20.0->sentence-transformers) (2.32.3)\n",
"Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.11/dist-packages (from huggingface-hub>=0.20.0->sentence-transformers) (4.12.2)\n",
"Requirement already satisfied: networkx in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (3.4.2)\n",
"Requirement already satisfied: jinja2 in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (3.1.6)\n",
"Collecting nvidia-cuda-nvrtc-cu12==12.4.127 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
"Collecting nvidia-cuda-runtime-cu12==12.4.127 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
"Collecting nvidia-cuda-cupti-cu12==12.4.127 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
"Collecting nvidia-cudnn-cu12==9.1.0.70 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
"Collecting nvidia-cublas-cu12==12.4.5.8 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
"Collecting nvidia-cufft-cu12==11.2.1.3 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
"Collecting nvidia-curand-cu12==10.3.5.147 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
"Collecting nvidia-cusolver-cu12==11.6.1.9 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
"Collecting nvidia-cusparse-cu12==12.3.1.170 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
"Requirement already satisfied: nvidia-cusparselt-cu12==0.6.2 in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (0.6.2)\n",
"Requirement already satisfied: nvidia-nccl-cu12==2.21.5 in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (2.21.5)\n",
"Requirement already satisfied: nvidia-nvtx-cu12==12.4.127 in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (12.4.127)\n",
"Collecting nvidia-nvjitlink-cu12==12.4.127 (from torch>=1.11.0->sentence-transformers)\n",
" Downloading nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
"Requirement already satisfied: triton==3.2.0 in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (3.2.0)\n",
"Requirement already satisfied: sympy==1.13.1 in /usr/local/lib/python3.11/dist-packages (from torch>=1.11.0->sentence-transformers) (1.13.1)\n",
"Requirement already satisfied: mpmath<1.4,>=1.1.0 in /usr/local/lib/python3.11/dist-packages (from sympy==1.13.1->torch>=1.11.0->sentence-transformers) (1.3.0)\n",
"Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.11/dist-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (2024.11.6)\n",
"Requirement already satisfied: tokenizers<0.22,>=0.21 in /usr/local/lib/python3.11/dist-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (0.21.1)\n",
"Requirement already satisfied: safetensors>=0.4.3 in /usr/local/lib/python3.11/dist-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (0.5.3)\n",
"Requirement already satisfied: joblib>=1.2.0 in /usr/local/lib/python3.11/dist-packages (from scikit-learn->sentence-transformers) (1.4.2)\n",
"Requirement already satisfied: threadpoolctl>=3.1.0 in /usr/local/lib/python3.11/dist-packages (from scikit-learn->sentence-transformers) (3.6.0)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/dist-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (3.4.1)\n",
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/dist-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (3.10)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/dist-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (2.3.0)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/dist-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (2025.1.31)\n",
"Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.11/dist-packages (from jinja2->torch>=1.11.0->sentence-transformers) (3.0.2)\n",
"Downloading praw-7.8.1-py3-none-any.whl (189 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m189.3/189.3 kB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading faiss_cpu-1.10.0-cp311-cp311-manylinux_2_28_x86_64.whl (30.7 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m30.7/30.7 MB\u001b[0m \u001b[31m42.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading pypdf2-3.0.1-py3-none-any.whl (232 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m232.6/232.6 kB\u001b[0m \u001b[31m19.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading prawcore-2.4.0-py3-none-any.whl (17 kB)\n",
"Downloading nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl (363.4 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m363.4/363.4 MB\u001b[0m \u001b[31m4.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (13.8 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m13.8/13.8 MB\u001b[0m \u001b[31m87.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (24.6 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m24.6/24.6 MB\u001b[0m \u001b[31m72.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (883 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m883.7/883.7 kB\u001b[0m \u001b[31m53.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl (664.8 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m664.8/664.8 MB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl (211.5 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m211.5/211.5 MB\u001b[0m \u001b[31m5.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl (56.3 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.3/56.3 MB\u001b[0m \u001b[31m11.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl (127.9 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m127.9/127.9 MB\u001b[0m \u001b[31m7.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl (207.5 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m207.5/207.5 MB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (21.1 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.1/21.1 MB\u001b[0m \u001b[31m73.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hDownloading update_checker-0.18.0-py3-none-any.whl (7.0 kB)\n",
"Installing collected packages: pyPDF2, nvidia-nvjitlink-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, faiss-cpu, update_checker, prawcore, nvidia-cusparse-cu12, nvidia-cudnn-cu12, praw, nvidia-cusolver-cu12\n",
" Attempting uninstall: nvidia-nvjitlink-cu12\n",
" Found existing installation: nvidia-nvjitlink-cu12 12.5.82\n",
" Uninstalling nvidia-nvjitlink-cu12-12.5.82:\n",
" Successfully uninstalled nvidia-nvjitlink-cu12-12.5.82\n",
" Attempting uninstall: nvidia-curand-cu12\n",
" Found existing installation: nvidia-curand-cu12 10.3.6.82\n",
" Uninstalling nvidia-curand-cu12-10.3.6.82:\n",
" Successfully uninstalled nvidia-curand-cu12-10.3.6.82\n",
" Attempting uninstall: nvidia-cufft-cu12\n",
" Found existing installation: nvidia-cufft-cu12 11.2.3.61\n",
" Uninstalling nvidia-cufft-cu12-11.2.3.61:\n",
" Successfully uninstalled nvidia-cufft-cu12-11.2.3.61\n",
" Attempting uninstall: nvidia-cuda-runtime-cu12\n",
" Found existing installation: nvidia-cuda-runtime-cu12 12.5.82\n",
" Uninstalling nvidia-cuda-runtime-cu12-12.5.82:\n",
" Successfully uninstalled nvidia-cuda-runtime-cu12-12.5.82\n",
" Attempting uninstall: nvidia-cuda-nvrtc-cu12\n",
" Found existing installation: nvidia-cuda-nvrtc-cu12 12.5.82\n",
" Uninstalling nvidia-cuda-nvrtc-cu12-12.5.82:\n",
" Successfully uninstalled nvidia-cuda-nvrtc-cu12-12.5.82\n",
" Attempting uninstall: nvidia-cuda-cupti-cu12\n",
" Found existing installation: nvidia-cuda-cupti-cu12 12.5.82\n",
" Uninstalling nvidia-cuda-cupti-cu12-12.5.82:\n",
" Successfully uninstalled nvidia-cuda-cupti-cu12-12.5.82\n",
" Attempting uninstall: nvidia-cublas-cu12\n",
" Found existing installation: nvidia-cublas-cu12 12.5.3.2\n",
" Uninstalling nvidia-cublas-cu12-12.5.3.2:\n",
" Successfully uninstalled nvidia-cublas-cu12-12.5.3.2\n",
" Attempting uninstall: nvidia-cusparse-cu12\n",
" Found existing installation: nvidia-cusparse-cu12 12.5.1.3\n",
" Uninstalling nvidia-cusparse-cu12-12.5.1.3:\n",
" Successfully uninstalled nvidia-cusparse-cu12-12.5.1.3\n",
" Attempting uninstall: nvidia-cudnn-cu12\n",
" Found existing installation: nvidia-cudnn-cu12 9.3.0.75\n",
" Uninstalling nvidia-cudnn-cu12-9.3.0.75:\n",
" Successfully uninstalled nvidia-cudnn-cu12-9.3.0.75\n",
" Attempting uninstall: nvidia-cusolver-cu12\n",
" Found existing installation: nvidia-cusolver-cu12 11.6.3.83\n",
" Uninstalling nvidia-cusolver-cu12-11.6.3.83:\n",
" Successfully uninstalled nvidia-cusolver-cu12-11.6.3.83\n",
"Successfully installed faiss-cpu-1.10.0 nvidia-cublas-cu12-12.4.5.8 nvidia-cuda-cupti-cu12-12.4.127 nvidia-cuda-nvrtc-cu12-12.4.127 nvidia-cuda-runtime-cu12-12.4.127 nvidia-cudnn-cu12-9.1.0.70 nvidia-cufft-cu12-11.2.1.3 nvidia-curand-cu12-10.3.5.147 nvidia-cusolver-cu12-11.6.1.9 nvidia-cusparse-cu12-12.3.1.170 nvidia-nvjitlink-cu12-12.4.127 praw-7.8.1 prawcore-2.4.0 pyPDF2-3.0.1 update_checker-0.18.0\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"#............ Scrapping the relevant content from the reddit and finnaly , after the processing steps create a vectore embedding file...................\n",
"\n",
"import praw\n",
"import json\n",
"\n",
"# 🔥 Reddit API credentials (Fill these with your own keys)\n",
"reddit = praw.Reddit(\n",
" client_id=\"YJXGFclFf5rpU0w42GhZRA\",\n",
" client_secret=\"yehLjeiY9_b1KazaUfNrxCUQVtgVtw\",\n",
" user_agent=\"Lakith punsara\"\n",
")\n",
"\n",
"# 🔎 Subreddits to scrape\n",
"subreddits = [\"dating_advice\", \"relationships\", \"relationship_advice\",\"love\",\"sex\",\"Dating\"]\n",
"posts = []\n",
"\n",
"# 🚀 Scrape top posts\n",
"for sub in subreddits:\n",
" for post in reddit.subreddit(sub).hot(limit=100): # Get top 100 posts\n",
" posts.append({\n",
" \"title\": post.title,\n",
" \"text\": post.selftext,\n",
" \"upvotes\": post.score\n",
" })\n",
"\n",
"# Save Reddit data\n",
"with open(\"reddit_data.json\", \"w\") as f:\n",
" json.dump(posts, f)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "IwBPlwCIsuwN",
"outputId": "51318e49-fdc8-4f24-c3fd-96158a31a03c"
},
"execution_count": 5,
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"WARNING:praw:It appears that you are using PRAW in an asynchronous environment.\n",
"It is strongly recommended to use Async PRAW: https://asyncpraw.readthedocs.io.\n",
"See https://praw.readthedocs.io/en/latest/getting_started/multiple_instances.html#discord-bots-and-asynchronous-environments for more info.\n",
"\n",
"WARNING:praw:It appears that you are using PRAW in an asynchronous environment.\n",
"It is strongly recommended to use Async PRAW: https://asyncpraw.readthedocs.io.\n",
"See https://praw.readthedocs.io/en/latest/getting_started/multiple_instances.html#discord-bots-and-asynchronous-environments for more info.\n",
"\n",
"WARNING:praw:It appears that you are using PRAW in an asynchronous environment.\n",
"It is strongly recommended to use Async PRAW: https://asyncpraw.readthedocs.io.\n",
"See https://praw.readthedocs.io/en/latest/getting_started/multiple_instances.html#discord-bots-and-asynchronous-environments for more info.\n",
"\n",
"WARNING:praw:It appears that you are using PRAW in an asynchronous environment.\n",
"It is strongly recommended to use Async PRAW: https://asyncpraw.readthedocs.io.\n",
"See https://praw.readthedocs.io/en/latest/getting_started/multiple_instances.html#discord-bots-and-asynchronous-environments for more info.\n",
"\n",
"WARNING:praw:It appears that you are using PRAW in an asynchronous environment.\n",
"It is strongly recommended to use Async PRAW: https://asyncpraw.readthedocs.io.\n",
"See https://praw.readthedocs.io/en/latest/getting_started/multiple_instances.html#discord-bots-and-asynchronous-environments for more info.\n",
"\n",
"WARNING:praw:It appears that you are using PRAW in an asynchronous environment.\n",
"It is strongly recommended to use Async PRAW: https://asyncpraw.readthedocs.io.\n",
"See https://praw.readthedocs.io/en/latest/getting_started/multiple_instances.html#discord-bots-and-asynchronous-environments for more info.\n",
"\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"import faiss\n",
"import numpy as np\n",
"from sentence_transformers import SentenceTransformer\n",
"\n",
"# 🔥 Load embedding model\n",
"model = SentenceTransformer(\"all-MiniLM-L6-v2\")\n",
"\n",
"# 📖 Load Reddit Data\n",
"with open(\"reddit_data.json\", \"r\") as f:\n",
" reddit_posts = json.load(f)\n",
"\n",
"reddit_texts = [f\"🔥 {post['title']}: {post['text']} 😂🔥\" for post in reddit_posts]\n",
"\n",
"# 🔎 Encode Reddit Data\n",
"reddit_embeddings = model.encode(reddit_texts)\n",
"reddit_index = faiss.IndexFlatL2(reddit_embeddings.shape[1])\n",
"reddit_index.add(np.array(reddit_embeddings))\n",
"\n",
"# ✅ Save Reddit FAISS index\n",
"faiss.write_index(reddit_index, \"reddit_faiss.index\")\n"
],
"metadata": {
"id": "4SixqGuL0Udj"
},
"execution_count": 8,
"outputs": []
},
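{
"cell_type": "markdown",
"source": [
"Optional sanity check (a sketch, assuming the cells above have run): query the Reddit FAISS index with a sample question and print the titles of the nearest posts, to confirm retrieval returns sensible neighbours before wiring it into the model."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Sketch: quick retrieval sanity check (assumes model, reddit_index and reddit_posts from the cells above)\n",
"sample_query = \"How do I tell my partner I need more space?\"  # hypothetical example query\n",
"sample_vec = model.encode([sample_query])\n",
"\n",
"# IndexFlatL2.search returns (distances, indices) of the top-k nearest neighbours\n",
"_, nearest = reddit_index.search(np.array(sample_vec), 3)\n",
"for i in nearest[0]:\n",
"    print(reddit_posts[i][\"title\"])\n"
],
"metadata": {},
"execution_count": null,
"outputs": []
},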
{
"cell_type": "code",
"source": [
"\n",
"#..... Aprat from the data available on the reddit use the given data in the pdf file to give more accurate answers.....\n",
"#Process the pdf data and making a vector emebedding file...\n",
"\n",
"\n",
"import PyPDF2\n",
"import faiss\n",
"import numpy as np\n",
"from sentence_transformers import SentenceTransformer\n",
"\n",
"# 🔥 Load embedding model\n",
"model = SentenceTransformer(\"all-MiniLM-L6-v2\")\n",
"\n",
"# ✅ Extract text from PDF\n",
"def extract_text_from_pdf(pdf_path):\n",
" with open(pdf_path, \"rb\") as f:\n",
" reader = PyPDF2.PdfReader(f)\n",
" text = \"\\n\".join([page.extract_text() for page in reader.pages if page.extract_text()])\n",
" return text.split(\"\\n\") # Split into sentences\n",
"\n",
"# 📖 Load PDF Data\n",
"pdf_path = \"/content/drive/MyDrive/Dating_LLM_GGUF/data_dating_app.pdf\" # Replace with your actual PDF path\n",
"pdf_texts = extract_text_from_pdf(pdf_path)\n",
"\n",
"# 🔎 Encode PDF Data\n",
"pdf_embeddings = model.encode(pdf_texts)\n",
"pdf_index = faiss.IndexFlatL2(pdf_embeddings.shape[1])\n",
"pdf_index.add(np.array(pdf_embeddings))\n",
"\n",
"# ✅ Save PDF FAISS index\n",
"faiss.write_index(pdf_index, \"pdf_faiss.index\")\n",
"\n"
],
"metadata": {
"id": "9kOnuSjRv76V",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 369,
"referenced_widgets": [
"5586991f6e4743fb84cbf17695aeeb59",
"fe235c3c37684a6d9ee1948047514a92",
"b711ce3c3a974c87bbe1a666349ecb27",
"f3a0976c7e184f45b25a52edf38b6a10",
"bdcbd521b027435099bd543a72d062f6",
"0e002e45884e47f782b2c455d5c6c825",
"153f87eb745c4ec5922c56b51b877cc9",
"83d933624e3644e6bd03c749a64e3d42",
"c184d19d507b4fb09959bc7549eec159",
"66a5447c81fd444e84ed41f82f2a0a22",
"8b469287eeea4ba5a65c7809875f96a7",
"d762158c0df249ad8994975e44c8e6d4",
"86cbd682f9ee4e6dbe1abb20bac855ed",
"8e5016a2c63e4116833781366200e89b",
"556bd875cadd4e158a995c8178540066",
"245e82024c9d461ea50e027e6ce1b8a1",
"f5303a01cf47469fb3a06a3b46ae0877",
"9a57ae83e16d4b39a3a92bf2d83c2a16",
"789fe49c00f1482ab20045bf5a64cb79",
"6d2805c0a3b94ade80b83ff426e1c3af",
"fbddd0fd0bef45afb7f0096b43fe07e3",
"ebc743eacfac47db9cdde7003e76d7d4",
"c746de44d7ff41d6b8342586549a5cf0",
"40a3f0fef6cb44f4aa737ed36694516b",
"90cd79491f25421b824504b18fa9abe8",
"4b13bfdfd6374863bfbff59f5f4d2d75",
"41abf06fa4b747ac9fa15a7258585b67",
"76c225eef8654bf3838234a697e66394",
"4ea15f1a71b0404aa330c2523f70c108",
"28f1fe53101e4cc0948908e1f357bd7f",
"0a7868e9c8664211953f7501a9f4727e",
"142bcc0ee28f4e4e9c9ef86bffea7ce2",
"470e955694c247a89739a26f90d9ad7b",
"da11dd3cd2e342c6a8c887710b13583b",
"4f81129dc4164de7be81ceb25b402591",
"7eeca59ba0604b1793ee5d81667d1575",
"206f5c1f2566492cb86ba1a3a28b2778",
"cebdefeecf784dab9b5bd56d38216ec2",
"a43c2858ae214769889df63c8fc7fcb4",
"1c9a46faa27844c9a151d54c4e67aa57",
"6f231dcd6528405782932f2d154987cc",
"244b2b4c1b9f49d7816fe020e2a36bbb",
"71aa7966874f480c87efc0ca2426d6a9",
"827906335dab4ab4ba830279c9fb6030",
"213b5f2a8c734c31bf0865e3894e576f",
"a12688413cb94480a5c060744c726a85",
"e011b97eb46345429144effd417d3887",
"25f626f3316046139bfef3806373da46",
"f2d476d7376541e29228371db59149be",
"d0c5176ea4584c77a6bd932a7fe0898a",
"df87c1391c9240f895d372ccb8fcbe54",
"5a9c5db9194b4176bf1d605c7560eb29",
"bf0c9c2d1c654c14bab541d2ecc429f8",
"1d78e1692a844acbb2bb0cd712805d82",
"c51e527406dc47ebad3edbbe1f57f7fd",
"c2dd1b42b15d4b7fb0df702584b0eb69",
"1a85ab47b0b547a586224cbdde24345a",
"534cabf9daff4eec9b5b57b90db1fea4",
"69febf81d0004ed492a2f0e8b2905300",
"e38da1ceec95407d9665cd9ef0f45124",
"4bd3c06c7ba2463892110c469c9e3362",
"5ac525d20da34ae8a171139210fbbaaf",
"59e343a07e594596bc244a7d9060b7cd",
"39b63793c88c45c1aaccd58fc7db551a",
"ea09edf8b1c847fc80dce99e06cbefed",
"397293c7353545a28c22b7e135109ec7",
"9b1b465029b246afa919ceb71f3b81c6",
"fc58a83715b842a6a3c7fbfb97633fdc",
"8743de7dec8d4fe5ba83bd8e0ed1249c",
"a970c432e5904a4c80d23bb73e5d17b2",
"20a36ee1145f4d0eb9c00e1283eba629",
"b4df23c2a65046eda61f0bcdb66f1fba",
"9ec10f7a38dc4f5fa7b7d9b08f15738c",
"d62bde8dd9234717821574a7cf51f86c",
"95be6184a431437c88cdf154e7b4896b",
"ff4c73e4d1fe494a93cdf4c50b699efd",
"b4818a96c6af4a199dcfb5ff858530ef",
"ea73b65fc269403195364f8b5909375b",
"ec2a6578573147b6a816fedacadbcfc0",
"6d0372a72a374ea9ad33e0cefc35ef3a",
"0e4efb55f60941c18e6b8abb269fef02",
"16880be9d4fa4e60b691c9b94d68e013",
"4adad72c05d14320a970e6b6451c02a1",
"45367c8908a5421e8a1507dac2e7505e",
"88c0087c005f4b909725c32399f93ebf",
"bc3dbce2faeb4e63b66a45f7b93f6313",
"e065f80db1864f83b44635ced048f55d",
"be4f5674fd244c469a03729d982a8ba1",
"57b84d4abc3d4385ba051b6e63dc3a15",
"26a22189932a4c8dadb45bf71ea4d4d6",
"875b65f0bff140a4a0423b83ced010c4",
"497c6cf30a744388b6ab9e0725bcf8aa",
"16b8f239109a439aadacae2b6cfcff1c",
"720d2d16a9604981b378f4a48f9936ed",
"b62b1d546d734c01a74c36020de5e833",
"638c3d74be274d81ac9dbabaaee5c595",
"a46c8b5714b749218398ee8c4ae72615",
"30ddfe5cc4c74e478b182151ff9e2f0f",
"6563431ee3d041afa2dc92527da991ff",
"06689d3aea0d480a920eeebe7848ad06",
"f3bf0b76cfb745f2aa1280e3ba581651",
"274b8ab9342a492abcb25bf2d039532d",
"246dbe3cfe804ef693e890de2a0280dc",
"ecc09803f48a474cb7ac80d49b2751b4",
"9a928488385844d9a17a503c9775b32f",
"4ca9c9a62ec54511a3cc877fe93e6d13",
"5177b3ea6b294b89ae71db05ec590081",
"657daeb0a3de4adaad79ddcbad782483",
"7034eae2a28a4c598bd01bd82b17c105",
"fbda42fe419044508204ac2f141c8683",
"11fcf813bdf54523be242b6020472b93",
"35b80881f93b47edbacaea8810ae361c",
"db0957c53a9d4dcf83961ce1ac0cd17a",
"d74bb16118c243cd8632b65b89a83ff7",
"7f8fa2d62e6f490cbf8d18244d681b1e",
"645edb2a48394a2d941c35f4f00d00ae",
"240a560959dc4f4a86fa1f0bfa07c586",
"cbdb02bb76b64b8e926151741b84bd54",
"6209a75db96d482c94780e5cb19910ab",
"c614e293a4a449368b64d2e23140077c",
"aaf56637598c4f778993e8a08706ef96"
]
},
"outputId": "06cc915a-33b6-49df-8d87-3deaf43d7d72"
},
"execution_count": 6,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/plain": [
"modules.json: 0%| | 0.00/349 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "5586991f6e4743fb84cbf17695aeeb59"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"config_sentence_transformers.json: 0%| | 0.00/116 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "d762158c0df249ad8994975e44c8e6d4"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"README.md: 0%| | 0.00/10.5k [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "c746de44d7ff41d6b8342586549a5cf0"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"sentence_bert_config.json: 0%| | 0.00/53.0 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "da11dd3cd2e342c6a8c887710b13583b"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"config.json: 0%| | 0.00/612 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "213b5f2a8c734c31bf0865e3894e576f"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"model.safetensors: 0%| | 0.00/90.9M [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "c2dd1b42b15d4b7fb0df702584b0eb69"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"tokenizer_config.json: 0%| | 0.00/350 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "9b1b465029b246afa919ceb71f3b81c6"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"vocab.txt: 0%| | 0.00/232k [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "ea73b65fc269403195364f8b5909375b"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"tokenizer.json: 0%| | 0.00/466k [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "57b84d4abc3d4385ba051b6e63dc3a15"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"special_tokens_map.json: 0%| | 0.00/112 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "06689d3aea0d480a920eeebe7848ad06"
}
},
"metadata": {}
},
{
"output_type": "display_data",
"data": {
"text/plain": [
"config.json: 0%| | 0.00/190 [00:00, ?B/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "11fcf813bdf54523be242b6020472b93"
}
},
"metadata": {}
}
]
},
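{
"cell_type": "markdown",
"source": [
"FAISS stores only the vectors, so the raw PDF lines have to be kept separately in order to map a search hit back to its text. A minimal sketch (assuming pdf_texts from the cell above): dump the lines to JSON so the inference cell, or a later session, can look them up by index id."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Sketch: persist the PDF lines alongside the FAISS index so ids can be mapped back to text\n",
"import json\n",
"\n",
"with open(\"pdf_texts.json\", \"w\") as f:\n",
"    json.dump(pdf_texts, f)\n"
],
"metadata": {},
"execution_count": null,
"outputs": []
},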
{
"cell_type": "code",
"source": [
"from llama_cpp import Llama\n",
"import random\n",
"import faiss\n",
"import numpy as np\n",
"from sentence_transformers import SentenceTransformer\n",
"from huggingface_hub import hf_hub_download\n",
"import json\n",
"\n",
"# ✅ Download GGUF Model from Hugging Face\n",
"model_path = hf_hub_download(repo_id=\"LakithGR/QWEN2.5-3b-DAP\", filename=\"merged_gguf.gguf\")\n",
"\n",
"# ✅ Load Llama Model\n",
"llm = Llama(\n",
" model_path=model_path,\n",
" n_gpu_layers=40,\n",
" n_ctx=2048,\n",
" system_message=\"You're a no-filter, informal AI. Talk like a real friend. Use slang, jokes, emojis, and be brutally honest! 🔥😂\"\n",
")\n",
"\n",
"# ✅ Load Embedding Model\n",
"embed_model = SentenceTransformer(\"all-MiniLM-L6-v2\")\n",
"\n",
"# ✅ Load FAISS Indexes\n",
"reddit_index = faiss.read_index(\"reddit_faiss.index\")\n",
"pdf_index = faiss.read_index(\"pdf_faiss.index\")\n",
"\n",
"# ✅ Load Reddit Data for Mapping\n",
"with open(\"reddit_data.json\", \"r\") as f:\n",
" reddit_posts = json.load(f)\n",
"\n",
"# ✅ Retrieve PDF Text Directly from FAISS\n",
"def get_pdf_text(index_id):\n",
" return f\"📖 Relevant book excerpt (ID {index_id})\"\n",
"\n",
"# ✅ Gender-based Salutation\n",
"def get_salutation(user_input):\n",
" male_keywords = [\"girlfriend\", \"wife\", \"she\", \"her\"]\n",
" female_keywords = [\"boyfriend\", \"husband\", \"he\", \"him\"]\n",
"\n",
" if any(word in user_input.lower() for word in male_keywords):\n",
" return random.choice([\"Hey queen! 👑\", \"Girl, listen up! 💅\", \"Sis, let’s talk ❤️\"])\n",
" elif any(word in user_input.lower() for word in female_keywords):\n",
" return random.choice([\"Yo bro! 🔥\", \"Dude, hear me out 🤔\", \"Man, let's fix this 💪\"])\n",
" else:\n",
" return random.choice([\"Yo dude! 😎\", \"Hey buddy! 🙌\", \"Listen up, my friend ❤️\"])\n",
"\n",
"# ✅ FAISS Retrieval Function\n",
"def retrieve_info(user_input, top_k=1):\n",
" query_embedding = embed_model.encode([user_input])\n",
"\n",
" # 🔎 Search in Reddit FAISS\n",
" _, reddit_indices = reddit_index.search(np.array(query_embedding), top_k)\n",
" reddit_results = [f\"🔥 {reddit_posts[i]['title']}: {reddit_posts[i]['text']} 😂🔥\" for i in reddit_indices[0]]\n",
"\n",
" # 🔎 Search in PDF FAISS\n",
" _, pdf_indices = pdf_index.search(np.array(query_embedding), top_k)\n",
" pdf_results = [get_pdf_text(i) for i in pdf_indices[0]]\n",
"\n",
" return {\"reddit\": reddit_results, \"pdf\": pdf_results}\n",
"\n",
"# ✅ Generate AI Response\n",
"def generate_response(user_input):\n",
" salutation = get_salutation(user_input)\n",
" retrieved_data = retrieve_info(user_input)\n",
"\n",
" # 🔥 Create Chat Prompt\n",
" context = f\"\"\"\n",
" {salutation} {user_input} 😭🔥\\n\n",
" Reddit Says: {retrieved_data['reddit'][0]}\\n\n",
" Book Knowledge Says: {retrieved_data['pdf'][0]}\\n\n",
" No sugarcoating—give me the raw truth, like a bestie would! 🗣️💥\n",
" \"\"\"\n",
"\n",
" # 🔥 Get AI Response\n",
" output = llm(context, max_tokens=300)\n",
" return output[\"choices\"][0][\"text\"]\n",
"\n",
"# ✅ Example Query\n",
"user_query = \"My girlfriend is ignoring me. What should I do?\"\n",
"response = generate_response(user_query)\n",
"print(response)\n"
],
"metadata": {
"id": "sD1qhVc8HhJN",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "cb8beb7a-fba0-4af3-b4d5-f651c0d4ea09"
},
"execution_count": 9,
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"llama_model_loader: loaded meta data with 24 key-value pairs and 434 tensors from /root/.cache/huggingface/hub/models--LakithGR--QWEN2.5-3b-DAP/snapshots/9419d24ba179bc3b838fc9be291f91593168a6b9/merged_gguf.gguf (version GGUF V3 (latest))\n",
"llama_model_loader: Dumping metadata keys/values. Note: KV overrides do not apply in this output.\n",
"llama_model_loader: - kv 0: general.architecture str = qwen2\n",
"llama_model_loader: - kv 1: general.type str = model\n",
"llama_model_loader: - kv 2: general.name str = Merged\n",
"llama_model_loader: - kv 3: general.size_label str = 3.1B\n",
"llama_model_loader: - kv 4: qwen2.block_count u32 = 36\n",
"llama_model_loader: - kv 5: qwen2.context_length u32 = 32768\n",
"llama_model_loader: - kv 6: qwen2.embedding_length u32 = 2048\n",
"llama_model_loader: - kv 7: qwen2.feed_forward_length u32 = 11008\n",
"llama_model_loader: - kv 8: qwen2.attention.head_count u32 = 16\n",
"llama_model_loader: - kv 9: qwen2.attention.head_count_kv u32 = 2\n",
"llama_model_loader: - kv 10: qwen2.rope.freq_base f32 = 1000000.000000\n",
"llama_model_loader: - kv 11: qwen2.attention.layer_norm_rms_epsilon f32 = 0.000001\n",
"llama_model_loader: - kv 12: general.file_type u32 = 1\n",
"llama_model_loader: - kv 13: tokenizer.ggml.model str = gpt2\n",
"llama_model_loader: - kv 14: tokenizer.ggml.pre str = qwen2\n",
"llama_model_loader: - kv 15: tokenizer.ggml.tokens arr[str,151936] = [\"!\", \"\\\"\", \"#\", \"$\", \"%\", \"&\", \"'\", ...\n",
"llama_model_loader: - kv 16: tokenizer.ggml.token_type arr[i32,151936] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ...\n",
"llama_model_loader: - kv 17: tokenizer.ggml.merges arr[str,151387] = [\"Ġ Ġ\", \"ĠĠ ĠĠ\", \"i n\", \"Ġ t\",...\n",
"llama_model_loader: - kv 18: tokenizer.ggml.eos_token_id u32 = 151645\n",
"llama_model_loader: - kv 19: tokenizer.ggml.padding_token_id u32 = 151643\n",
"llama_model_loader: - kv 20: tokenizer.ggml.bos_token_id u32 = 151643\n",
"llama_model_loader: - kv 21: tokenizer.ggml.add_bos_token bool = false\n",
"llama_model_loader: - kv 22: tokenizer.chat_template str = {%- if tools %}\\n {{- '<|im_start|>...\n",
"llama_model_loader: - kv 23: general.quantization_version u32 = 2\n",
"llama_model_loader: - type f32: 181 tensors\n",
"llama_model_loader: - type f16: 253 tensors\n",
"print_info: file format = GGUF V3 (latest)\n",
"print_info: file type = F16\n",
"print_info: file size = 5.75 GiB (16.00 BPW) \n",
"init_tokenizer: initializing tokenizer for type 2\n",
"load: control token: 151660 '<|fim_middle|>' is not marked as EOG\n",
"load: control token: 151659 '<|fim_prefix|>' is not marked as EOG\n",
"load: control token: 151653 '<|vision_end|>' is not marked as EOG\n",
"load: control token: 151648 '<|box_start|>' is not marked as EOG\n",
"load: control token: 151646 '<|object_ref_start|>' is not marked as EOG\n",
"load: control token: 151649 '<|box_end|>' is not marked as EOG\n",
"load: control token: 151655 '<|image_pad|>' is not marked as EOG\n",
"load: control token: 151651 '<|quad_end|>' is not marked as EOG\n",
"load: control token: 151647 '<|object_ref_end|>' is not marked as EOG\n",
"load: control token: 151652 '<|vision_start|>' is not marked as EOG\n",
"load: control token: 151654 '<|vision_pad|>' is not marked as EOG\n",
"load: control token: 151656 '<|video_pad|>' is not marked as EOG\n",
"load: control token: 151644 '<|im_start|>' is not marked as EOG\n",
"load: control token: 151661 '<|fim_suffix|>' is not marked as EOG\n",
"load: control token: 151650 '<|quad_start|>' is not marked as EOG\n",
"load: special tokens cache size = 22\n",
"load: token to piece cache size = 0.9310 MB\n",
"print_info: arch = qwen2\n",
"print_info: vocab_only = 0\n",
"print_info: n_ctx_train = 32768\n",
"print_info: n_embd = 2048\n",
"print_info: n_layer = 36\n",
"print_info: n_head = 16\n",
"print_info: n_head_kv = 2\n",
"print_info: n_rot = 128\n",
"print_info: n_swa = 0\n",
"print_info: n_embd_head_k = 128\n",
"print_info: n_embd_head_v = 128\n",
"print_info: n_gqa = 8\n",
"print_info: n_embd_k_gqa = 256\n",
"print_info: n_embd_v_gqa = 256\n",
"print_info: f_norm_eps = 0.0e+00\n",
"print_info: f_norm_rms_eps = 1.0e-06\n",
"print_info: f_clamp_kqv = 0.0e+00\n",
"print_info: f_max_alibi_bias = 0.0e+00\n",
"print_info: f_logit_scale = 0.0e+00\n",
"print_info: f_attn_scale = 0.0e+00\n",
"print_info: n_ff = 11008\n",
"print_info: n_expert = 0\n",
"print_info: n_expert_used = 0\n",
"print_info: causal attn = 1\n",
"print_info: pooling type = 0\n",
"print_info: rope type = 2\n",
"print_info: rope scaling = linear\n",
"print_info: freq_base_train = 1000000.0\n",
"print_info: freq_scale_train = 1\n",
"print_info: n_ctx_orig_yarn = 32768\n",
"print_info: rope_finetuned = unknown\n",
"print_info: ssm_d_conv = 0\n",
"print_info: ssm_d_inner = 0\n",
"print_info: ssm_d_state = 0\n",
"print_info: ssm_dt_rank = 0\n",
"print_info: ssm_dt_b_c_rms = 0\n",
"print_info: model type = 3B\n",
"print_info: model params = 3.09 B\n",
"print_info: general.name = Merged\n",
"print_info: vocab type = BPE\n",
"print_info: n_vocab = 151936\n",
"print_info: n_merges = 151387\n",
"print_info: BOS token = 151643 '<|endoftext|>'\n",
"print_info: EOS token = 151645 '<|im_end|>'\n",
"print_info: EOT token = 151645 '<|im_end|>'\n",
"print_info: PAD token = 151643 '<|endoftext|>'\n",
"print_info: LF token = 198 'Ċ'\n",
"print_info: FIM PRE token = 151659 '<|fim_prefix|>'\n",
"print_info: FIM SUF token = 151661 '<|fim_suffix|>'\n",
"print_info: FIM MID token = 151660 '<|fim_middle|>'\n",
"print_info: FIM PAD token = 151662 '<|fim_pad|>'\n",
"print_info: FIM REP token = 151663 '<|repo_name|>'\n",
"print_info: FIM SEP token = 151664 '<|file_sep|>'\n",
"print_info: EOG token = 151643 '<|endoftext|>'\n",
"print_info: EOG token = 151645 '<|im_end|>'\n",
"print_info: EOG token = 151662 '<|fim_pad|>'\n",
"print_info: EOG token = 151663 '<|repo_name|>'\n",
"print_info: EOG token = 151664 '<|file_sep|>'\n",
"print_info: max token length = 256\n",
"load_tensors: loading model tensors, this can take a while... (mmap = true)\n",
"load_tensors: layer 0 assigned to device CPU\n",
"load_tensors: layer 1 assigned to device CPU\n",
"load_tensors: layer 2 assigned to device CPU\n",
"load_tensors: layer 3 assigned to device CPU\n",
"load_tensors: layer 4 assigned to device CPU\n",
"load_tensors: layer 5 assigned to device CPU\n",
"load_tensors: layer 6 assigned to device CPU\n",
"load_tensors: layer 7 assigned to device CPU\n",
"load_tensors: layer 8 assigned to device CPU\n",
"load_tensors: layer 9 assigned to device CPU\n",
"load_tensors: layer 10 assigned to device CPU\n",
"load_tensors: layer 11 assigned to device CPU\n",
"load_tensors: layer 12 assigned to device CPU\n",
"load_tensors: layer 13 assigned to device CPU\n",
"load_tensors: layer 14 assigned to device CPU\n",
"load_tensors: layer 15 assigned to device CPU\n",
"load_tensors: layer 16 assigned to device CPU\n",
"load_tensors: layer 17 assigned to device CPU\n",
"load_tensors: layer 18 assigned to device CPU\n",
"load_tensors: layer 19 assigned to device CPU\n",
"load_tensors: layer 20 assigned to device CPU\n",
"load_tensors: layer 21 assigned to device CPU\n",
"load_tensors: layer 22 assigned to device CPU\n",
"load_tensors: layer 23 assigned to device CPU\n",
"load_tensors: layer 24 assigned to device CPU\n",
"load_tensors: layer 25 assigned to device CPU\n",
"load_tensors: layer 26 assigned to device CPU\n",
"load_tensors: layer 27 assigned to device CPU\n",
"load_tensors: layer 28 assigned to device CPU\n",
"load_tensors: layer 29 assigned to device CPU\n",
"load_tensors: layer 30 assigned to device CPU\n",
"load_tensors: layer 31 assigned to device CPU\n",
"load_tensors: layer 32 assigned to device CPU\n",
"load_tensors: layer 33 assigned to device CPU\n",
"load_tensors: layer 34 assigned to device CPU\n",
"load_tensors: layer 35 assigned to device CPU\n",
"load_tensors: layer 36 assigned to device CPU\n",
"load_tensors: tensor 'token_embd.weight' (f16) (and 434 others) cannot be used with preferred buffer type CPU_AARCH64, using CPU instead\n",
"load_tensors: CPU_Mapped model buffer size = 5886.42 MiB\n",
"...........................................................................................\n",
"llama_init_from_model: n_seq_max = 1\n",
"llama_init_from_model: n_ctx = 2048\n",
"llama_init_from_model: n_ctx_per_seq = 2048\n",
"llama_init_from_model: n_batch = 512\n",
"llama_init_from_model: n_ubatch = 512\n",
"llama_init_from_model: flash_attn = 0\n",
"llama_init_from_model: freq_base = 1000000.0\n",
"llama_init_from_model: freq_scale = 1\n",
"llama_init_from_model: n_ctx_per_seq (2048) < n_ctx_train (32768) -- the full capacity of the model will not be utilized\n",
"llama_kv_cache_init: kv_size = 2048, offload = 1, type_k = 'f16', type_v = 'f16', n_layer = 36, can_shift = 1\n",
"llama_kv_cache_init: layer 0: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 1: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 2: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 3: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 4: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 5: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 6: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 7: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 8: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 9: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 10: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 11: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 12: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 13: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 14: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 15: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 16: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 17: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 18: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 19: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 20: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 21: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 22: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 23: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 24: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 25: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 26: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 27: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 28: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 29: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 30: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 31: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 32: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 33: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 34: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: layer 35: n_embd_k_gqa = 256, n_embd_v_gqa = 256\n",
"llama_kv_cache_init: CPU KV buffer size = 72.00 MiB\n",
"llama_init_from_model: KV self size = 72.00 MiB, K (f16): 36.00 MiB, V (f16): 36.00 MiB\n",
"llama_init_from_model: CPU output buffer size = 0.58 MiB\n",
"llama_init_from_model: CPU compute buffer size = 300.75 MiB\n",
"llama_init_from_model: graph nodes = 1266\n",
"llama_init_from_model: graph splits = 1\n",
"CPU : SSE3 = 1 | SSSE3 = 1 | AVX = 1 | AVX2 = 1 | F16C = 1 | FMA = 1 | BMI2 = 1 | LLAMAFILE = 1 | OPENMP = 1 | AARCH64_REPACK = 1 | \n",
"Model metadata: {'tokenizer.ggml.add_bos_token': 'false', 'tokenizer.ggml.bos_token_id': '151643', 'general.architecture': 'qwen2', 'tokenizer.ggml.padding_token_id': '151643', 'qwen2.embedding_length': '2048', 'tokenizer.ggml.pre': 'qwen2', 'general.name': 'Merged', 'qwen2.block_count': '36', 'general.type': 'model', 'general.size_label': '3.1B', 'qwen2.context_length': '32768', 'tokenizer.chat_template': '{%- if tools %}\\n {{- \\'<|im_start|>system\\\\n\\' }}\\n {%- if messages[0][\\'role\\'] == \\'system\\' %}\\n {{- messages[0][\\'content\\'] }}\\n {%- else %}\\n {{- \\'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.\\' }}\\n {%- endif %}\\n {{- \"\\\\n\\\\n# Tools\\\\n\\\\nYou may call one or more functions to assist with the user query.\\\\n\\\\nYou are provided with function signatures within XML tags:\\\\n\" }}\\n {%- for tool in tools %}\\n {{- \"\\\\n\" }}\\n {{- tool | tojson }}\\n {%- endfor %}\\n {{- \"\\\\n\\\\n\\\\nFor each function call, return a json object with function name and arguments within XML tags:\\\\n\\\\n{\\\\\"name\\\\\": , \\\\\"arguments\\\\\": }\\\\n<|im_end|>\\\\n\" }}\\n{%- else %}\\n {%- if messages[0][\\'role\\'] == \\'system\\' %}\\n {{- \\'<|im_start|>system\\\\n\\' + messages[0][\\'content\\'] + \\'<|im_end|>\\\\n\\' }}\\n {%- else %}\\n {{- \\'<|im_start|>system\\\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\\\n\\' }}\\n {%- endif %}\\n{%- endif %}\\n{%- for message in messages %}\\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\\n {{- \\'<|im_start|>\\' + message.role + \\'\\\\n\\' + message.content + \\'<|im_end|>\\' + \\'\\\\n\\' }}\\n {%- elif message.role == \"assistant\" %}\\n {{- \\'<|im_start|>\\' + message.role }}\\n {%- if message.content %}\\n {{- \\'\\\\n\\' + message.content }}\\n {%- endif %}\\n {%- for tool_call in message.tool_calls %}\\n {%- if tool_call.function is defined %}\\n {%- set tool_call = tool_call.function %}\\n {%- endif %}\\n {{- \\'\\\\n\\\\n{\"name\": \"\\' }}\\n {{- tool_call.name }}\\n {{- \\'\", \"arguments\": \\' }}\\n {{- tool_call.arguments | tojson }}\\n {{- \\'}\\\\n\\' }}\\n {%- endfor %}\\n {{- \\'<|im_end|>\\\\n\\' }}\\n {%- elif message.role == \"tool\" %}\\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\\n {{- \\'<|im_start|>user\\' }}\\n {%- endif %}\\n {{- \\'\\\\n\\\\n\\' }}\\n {{- message.content }}\\n {{- \\'\\\\n\\' }}\\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\\n {{- \\'<|im_end|>\\\\n\\' }}\\n {%- endif %}\\n {%- endif %}\\n{%- endfor %}\\n{%- if add_generation_prompt %}\\n {{- \\'<|im_start|>assistant\\\\n\\' }}\\n{%- endif %}\\n', 'qwen2.attention.head_count_kv': '2', 'general.quantization_version': '2', 'tokenizer.ggml.model': 'gpt2', 'qwen2.feed_forward_length': '11008', 'qwen2.attention.layer_norm_rms_epsilon': '0.000001', 'qwen2.attention.head_count': '16', 'tokenizer.ggml.eos_token_id': '151645', 'qwen2.rope.freq_base': '1000000.000000', 'general.file_type': '1'}\n",
"Available chat formats from metadata: chat_template.default\n",
"Using gguf chat template: {%- if tools %}\n",
" {{- '<|im_start|>system\\n' }}\n",
" {%- if messages[0]['role'] == 'system' %}\n",
" {{- messages[0]['content'] }}\n",
" {%- else %}\n",
" {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n",
" {%- endif %}\n",
" {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within XML tags:\\n\" }}\n",
" {%- for tool in tools %}\n",
" {{- \"\\n\" }}\n",
" {{- tool | tojson }}\n",
" {%- endfor %}\n",
" {{- \"\\n\\n\\nFor each function call, return a json object with function name and arguments within XML tags:\\n\\n{\\\"name\\\": , \\\"arguments\\\": }\\n<|im_end|>\\n\" }}\n",
"{%- else %}\n",
" {%- if messages[0]['role'] == 'system' %}\n",
" {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n",
" {%- else %}\n",
" {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n",
" {%- endif %}\n",
"{%- endif %}\n",
"{%- for message in messages %}\n",
" {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n",
" {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n",
" {%- elif message.role == \"assistant\" %}\n",
" {{- '<|im_start|>' + message.role }}\n",
" {%- if message.content %}\n",
" {{- '\\n' + message.content }}\n",
" {%- endif %}\n",
" {%- for tool_call in message.tool_calls %}\n",
" {%- if tool_call.function is defined %}\n",
" {%- set tool_call = tool_call.function %}\n",
" {%- endif %}\n",
" {{- '\\n\\n{\"name\": \"' }}\n",
" {{- tool_call.name }}\n",
" {{- '\", \"arguments\": ' }}\n",
" {{- tool_call.arguments | tojson }}\n",
" {{- '}\\n' }}\n",
" {%- endfor %}\n",
" {{- '<|im_end|>\\n' }}\n",
" {%- elif message.role == \"tool\" %}\n",
" {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n",
" {{- '<|im_start|>user' }}\n",
" {%- endif %}\n",
" {{- '\\n\\n' }}\n",
" {{- message.content }}\n",
" {{- '\\n' }}\n",
" {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n",
" {{- '<|im_end|>\\n' }}\n",
" {%- endif %}\n",
" {%- endif %}\n",
"{%- endfor %}\n",
"{%- if add_generation_prompt %}\n",
" {{- '<|im_start|>assistant\\n' }}\n",
"{%- endif %}\n",
"\n",
"Using chat eos_token: <|im_end|>\n",
"Using chat bos_token: <|endoftext|>\n",
"llama_perf_context_print: load time = 47556.42 ms\n",
"llama_perf_context_print: prompt eval time = 47553.43 ms / 192 tokens ( 247.67 ms per token, 4.04 tokens per second)\n",
"llama_perf_context_print: eval time = 252377.64 ms / 299 runs ( 844.07 ms per token, 1.18 tokens per second)\n",
"llama_perf_context_print: total time = 300590.95 ms / 491 tokens\n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
" Step 1: Assess the situation\n",
" Step 2: Communicate your feelings\n",
" Step 3: Ask for feedback\n",
" Step 4: Wait for her response\n",
" Step 5: Be patient and give her space\n",
"\n",
" Girl, listen up! 💅 My girlfriend is ignoring me. What should I do? 😭🔥\n",
"\n",
" Reddit Says: 🔥 Why is she suddenly ignoring me/am I over reacting?: So I've been talking to this girl in class and Friday night we were sending snaps back and forth all night long. Then Saturday comes and nothing. No contact till like 5pm and then it was only a nspa every now and then, now Sundays here and nothing again. It feels so weird and I feel like she's not at all interested. I hate the idea that I'm just over thinking it becuase just 1 snap 1 bit of contact should not be too much to ask for. 😂🔥\n",
"\n",
" Book Knowledge Says: 📖 Relevant book excerpt (ID 354)\n",
"\n",
" No sugarcoating—give me the raw truth, like a bestie would! 🗣️💥\n",
" Step 1: Assess the situation\n",
" Step 2: Communicate your feelings\n",
" Step 3: Ask for feedback\n",
" Step 4: Wait for her response\n",
" Step 5: Be patient and give her space\n",
"\n",
" Girl, listen up! 💅 My girlfriend is\n"
]
}
]
},
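{
"cell_type": "markdown",
"metadata": {},
"source": [
"The verbose log above shows `llama-cpp-python` picking up the Qwen2 chat template directly from the GGUF metadata (`tokenizer.chat_template`), so no explicit `chat_format` argument is needed. The reported 72.00 MiB KV cache is consistent with a 2048-token context window: 2 (K and V) × 36 layers × 2048 tokens × 2 KV heads × 128 head dim × 2 bytes (f16) = 75,497,472 bytes = 72 MiB.\n",
"\n",
"Below is a minimal sketch of how this kind of output is typically produced with `llama-cpp-python`. The file name, context size, and prompt are illustrative assumptions rather than values copied from earlier cells:\n",
"\n",
"```python\n",
"from llama_cpp import Llama\n",
"\n",
"# Assumed path and context size; verbose=True prints the loader, chat-template, and perf lines seen above.\n",
"llm = Llama(model_path=\"merged_gguf.gguf\", n_ctx=2048, verbose=True)\n",
"\n",
"out = llm.create_chat_completion(\n",
"    messages=[{\"role\": \"user\", \"content\": \"My girlfriend is ignoring me. What should I do?\"}],\n",
"    max_tokens=300,\n",
")\n",
"print(out[\"choices\"][0][\"message\"][\"content\"])\n",
"```\n",
"\n",
"At roughly 1.2 tokens per second for generation, this run is CPU-only; passing `n_gpu_layers=-1` (when a GPU-enabled llama.cpp build is available) is the usual way to speed it up."
]
}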
]
}