{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "dl_cw2_final_2.ipynb",
"provenance": [],
"collapsed_sections": [],
"toc_visible": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3",
"language": "python"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.9-final"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"f5b45a4d4cd040cfb5b5c9b0034f2879": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_51b95cf998e7427c9ad7b96d3e6bd1ee",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_5e08e45a0cba4dd0b8ce21959c673933",
"IPY_MODEL_c7ccdfb21c14421c9316a8b7aee926e8"
]
}
},
"51b95cf998e7427c9ad7b96d3e6bd1ee": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"5e08e45a0cba4dd0b8ce21959c673933": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_7d98adb1baa54d50bed2fd291b3b61d7",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_271afb66e8984130bd29bc0352162918"
}
},
"c7ccdfb21c14421c9316a8b7aee926e8": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_03f0a5d734424182a9561f1b0894e722",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 9920512/? [00:00<00:00, 10038747.14it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_08ef0e4e350348f8a51dcd641b261c48"
}
},
"7d98adb1baa54d50bed2fd291b3b61d7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"271afb66e8984130bd29bc0352162918": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"03f0a5d734424182a9561f1b0894e722": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"08ef0e4e350348f8a51dcd641b261c48": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"7d3df8e5a7e24ba68a159d8e42b3f087": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_dd38e332f62442e3974aed66affbbae7",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_2f3367c024ed4e2a944ff8c6f416e82f",
"IPY_MODEL_56fa7685cee643a18df229f4128509c0"
]
}
},
"dd38e332f62442e3974aed66affbbae7": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"2f3367c024ed4e2a944ff8c6f416e82f": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_23868dc9379a4f6cb90d67364d00172f",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_3801c55d57344820952ad82035e6127c"
}
},
"56fa7685cee643a18df229f4128509c0": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_2d1279a7f86d4ce297375f262036fb33",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 32768/? [00:00<00:00, 89241.03it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_f0059cf109384a259c024d4dc560428f"
}
},
"23868dc9379a4f6cb90d67364d00172f": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"3801c55d57344820952ad82035e6127c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"2d1279a7f86d4ce297375f262036fb33": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"f0059cf109384a259c024d4dc560428f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"977a64a542a940edbd8a711e62a8896d": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_eee9e7dba76249169ec3d54a1eb0ec9a",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_71b3dc664d5348f6bc121ff33194074c",
"IPY_MODEL_b59b7d5861194d3284c183880fb38983"
]
}
},
"eee9e7dba76249169ec3d54a1eb0ec9a": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"71b3dc664d5348f6bc121ff33194074c": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_d41970da65bd471fbf1fdbfe05eb4b31",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_440eb9e1709c482dadb1936c7233d2b4"
}
},
"b59b7d5861194d3284c183880fb38983": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_12bba14540ac4f0494af76bc3ce8409c",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 1654784/? [00:00<00:00, 5764601.89it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_836b8f0466fd4e26b3791248dee03bf0"
}
},
"d41970da65bd471fbf1fdbfe05eb4b31": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"440eb9e1709c482dadb1936c7233d2b4": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"12bba14540ac4f0494af76bc3ce8409c": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"836b8f0466fd4e26b3791248dee03bf0": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"800e985bb7db4f7f9368d8464b6b1dfc": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_1f5492e0a5d542adb296b39d078c0e7e",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_88bd758103cd47128998a9b430df1c05",
"IPY_MODEL_78dc4f4d1fef45d49f90acadf971f393"
]
}
},
"1f5492e0a5d542adb296b39d078c0e7e": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"88bd758103cd47128998a9b430df1c05": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_89854090e47c4da8baf33730638b211f",
"_dom_classes": [],
"description": " 0%",
"_model_name": "FloatProgressModel",
"bar_style": "info",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 0,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_33abc99f932c4879a5701637e7f1ded4"
}
},
"78dc4f4d1fef45d49f90acadf971f393": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_a895cb2fe4ab484d8982586a52062fe6",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 0/4542 [00:00<?, ?it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_d55fea12d6bd48efaad78efd0dadf7ed"
}
},
"89854090e47c4da8baf33730638b211f": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"33abc99f932c4879a5701637e7f1ded4": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"a895cb2fe4ab484d8982586a52062fe6": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"d55fea12d6bd48efaad78efd0dadf7ed": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"208254968d00463da4fd6218773e4a83": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_c3361e8a6f02493ab6f599ecca41bb6d",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_a06338df87b349e2a79355b801ceb3a7",
"IPY_MODEL_edb18eb99fc34c6b80b0176191df8a3b"
]
}
},
"c3361e8a6f02493ab6f599ecca41bb6d": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"a06338df87b349e2a79355b801ceb3a7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_1923dadc242949febc606e4652a0b4b5",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "info",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_ebb299f97f93402eb7acce9784955020"
}
},
"edb18eb99fc34c6b80b0176191df8a3b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_439c5b234eb84943b19eefcb5747a1f9",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 9920512/? [00:20<00:00, 5732888.29it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_47023f26b6c643fe834a4a587a87f798"
}
},
"1923dadc242949febc606e4652a0b4b5": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"ebb299f97f93402eb7acce9784955020": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"439c5b234eb84943b19eefcb5747a1f9": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"47023f26b6c643fe834a4a587a87f798": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"175bd5ac104e4246abbac3e8cf87cef9": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_84d0690198f74e638cd8e7b08e720705",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_8af14b2a15e946a4bbd0f903d113cbe5",
"IPY_MODEL_8b5475fa86874f5cb311e0ff64cf8891"
]
}
},
"84d0690198f74e638cd8e7b08e720705": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"8af14b2a15e946a4bbd0f903d113cbe5": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_03564ee9e1a945129177cad0cf040a56",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_1516c0706ead46abb20a40eab3ff27e9"
}
},
"8b5475fa86874f5cb311e0ff64cf8891": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_ccea99f12154455a99d1bc6e4d7816ff",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 32768/? [00:00<00:00, 71114.91it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_4738f5763ee04734acf3306d1bf19850"
}
},
"03564ee9e1a945129177cad0cf040a56": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"1516c0706ead46abb20a40eab3ff27e9": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"ccea99f12154455a99d1bc6e4d7816ff": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"4738f5763ee04734acf3306d1bf19850": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"34e4fecfa60643a9897e7125ec70cd9a": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_d2c1bd006aa54791a93257b6760221bc",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_ae63f0b072f2450d9751c4bc46803b00",
"IPY_MODEL_4f40f22058744466902a762959895af0"
]
}
},
"d2c1bd006aa54791a93257b6760221bc": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"ae63f0b072f2450d9751c4bc46803b00": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_8469cc2a05b94c8ebeb06e7c0ef48ed9",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_9a1b16b06f5840aaa188d0026dea345b"
}
},
"4f40f22058744466902a762959895af0": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_161d7f9f503942a5937e8099fe2c7953",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 1654784/? [00:00<00:00, 4347767.38it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_b9fe179f1c0d4130942349edb1ee7023"
}
},
"8469cc2a05b94c8ebeb06e7c0ef48ed9": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"9a1b16b06f5840aaa188d0026dea345b": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"161d7f9f503942a5937e8099fe2c7953": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"b9fe179f1c0d4130942349edb1ee7023": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"d6e3a81c8fa04646ad977c2f712713bf": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_e86265fb7d4b4fb0abdb60a9d012741c",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_5a64e0bf3c23450aa585320262158182",
"IPY_MODEL_c47f145ab37d4e959bf6ae4b1e1dfb37"
]
}
},
"e86265fb7d4b4fb0abdb60a9d012741c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"5a64e0bf3c23450aa585320262158182": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_21ee221a6b484ccdbf5047ba67d260a0",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 1,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 1,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_af8622099a214ca188b2053d50e85f6b"
}
},
"c47f145ab37d4e959bf6ae4b1e1dfb37": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_0e66434877534e09a9b1249c08d7f5a1",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 8192/? [00:00<00:00, 51368.91it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_99ec63cc36a04a8ea6348f9dd78dc015"
}
},
"21ee221a6b484ccdbf5047ba67d260a0": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"af8622099a214ca188b2053d50e85f6b": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"0e66434877534e09a9b1249c08d7f5a1": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"99ec63cc36a04a8ea6348f9dd78dc015": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
}
}
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "VzzPw7rEZ-OX"
},
"source": [
"# Coursework 2: Generative Models\n",
"\n",
"## Instructions\n",
"\n",
"Please submit on CATe two zip files:\n",
"\n",
"*CW2.zip* containing the following:\n",
"1. A version of this notebook containing your answers. Write your answers in the cells below each question. **Please deliver the notebook including the outputs of the cells below.**\n",
"2. Your trained VAE model as *VAE_model.pth*\n",
"\n",
"*GAN.zip* containing your trained Generator and Discriminator: *DCGAN_model_D.pth and DCGAN_model_G.pth*\n",
"\n",
"Please avoid using markdown headings (# ## etc.) as these will affect the ToC. Instead use html headings if you want emphasis.\n",
"\n",
"Similarly to the previous coursework, we recommend that you use Google Colaboratory in order to train the required networks.\n",
"\n",
"TAs will run a testing cell (at the end of this notebook), so you are required to copy your transform and denorm functions to a cell near the bottom of the document (it is demarkated).\n",
"\n",
"<font color=\"blue\">**The deadline for submission is 19:00, Thursday 19th February, 2021** </font>"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "1oqY55OLpxDm"
},
"source": [
"## Setting up working environment\n",
"\n",
"For this coursework you, will need to train a large network, therefore we recommend you work with Google Colaboratory, which provides free GPU time. You will need a Google account to do so.\n",
"\n",
"Please log in to your account and go to the following page: https://colab.research.google.com. Then upload this notebook.\n",
"\n",
"For GPU support, go to \"Edit\" -> \"Notebook Settings\", and select \"Hardware accelerator\" as \"GPU\".\n",
"\n",
"You will need to install pytorch and import some utilities by running the following cell:"
]
},
{
"cell_type": "code",
"metadata": {
"id": "FJg7ozC_q3HF",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "4edef2cd-79ba-4a8d-a1b5-ae1ceea1da3e"
},
"source": [
"!pip install -q torch torchvision\n",
"!git clone -q https://github.com/afspies/icl_dl_cw2_utils\n",
"from icl_dl_cw2_utils.utils.plotting import plot_tsne\n",
"%load_ext google.colab.data_table"
],
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"text": [
"fatal: destination path 'icl_dl_cw2_utils' already exists and is not an empty directory.\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "oEyMm16MoegE",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "40c8bcb3-a749-4894-8594-a9e78c2f1b3d"
},
"source": [
"from google.colab import drive\n",
"drive.mount('/content/drive') # Outputs will be saved in your google drive"
],
"execution_count": 2,
"outputs": [
{
"output_type": "stream",
"text": [
"Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "ezLSfB6IqAzK"
},
"source": [
"## Introduction\n",
"\n",
"For this coursework, you are asked to implement two commonly used generative models:\n",
"1. A **Variational Autoencoder (VAE)**\n",
"2. A **Deep Convolutional Generative Adversarial Network (DCGAN)**\n",
"\n",
"For the first part you will the MNIST dataset https://en.wikipedia.org/wiki/MNIST_database and for the second the CIFAR-10 (https://www.cs.toronto.edu/~kriz/cifar.html).\n",
"\n",
"Each part is worth 50 points. \n",
"\n",
"The emphasis of both parts lies in understanding how the models behave and learn, however, some points will be available for getting good results with your GAN (though you should not spend too long on this)."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "75mICbvzqQyx"
},
"source": [
"# Part 1 - Variational Autoencoder\n",
"\n",
"## Part 1.1 (25 points)\n",
"**Your Task:**\n",
"\n",
"a. Implement the VAE architecture with accompanying hyperparameters. Experiment with Feedforward and Convolutional Layers to see which gives better results.\n",
"\n",
"b. Design an appropriate loss function and train the model.\n"
]
},
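{
"cell_type": "markdown",
"metadata": {},
"source": [
"*Note (an illustrative sketch, not the required answer):* for part (b) above, one common choice of objective is the negative ELBO, i.e. a reconstruction term plus a KL term. With a weighting factor corresponding to the `beta` hyperparameter set in the hyper-parameter cell further below, this reads\n",
"\n",
"$$\\mathcal{L}(\\theta, \\phi; x) = \\mathbb{E}_{q_\\phi(z|x)}\\big[-\\log p_\\theta(x|z)\\big] + \\beta \\, D_{\\mathrm{KL}}\\big(q_\\phi(z|x) \\,\\|\\, p(z)\\big),$$\n",
"\n",
"where $q_\\phi(z|x)$ is the encoder's approximate posterior and $p(z) = \\mathcal{N}(0, I)$ is the prior; the symbols $\\theta$, $\\phi$, $x$, $z$ are generic notation rather than names used elsewhere in this notebook."
]
},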
{
"cell_type": "code",
"metadata": {
"id": "ym5l5RmmJtLw",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "33d92050-a092-4eb4-856e-e37663a74e1d"
},
"source": [
"import os\n",
"import numpy as np\n",
"import torch\n",
"import torch.nn as nn\n",
"from torch.utils.data import DataLoader, sampler\n",
"from torchvision import datasets, transforms\n",
"from torchvision.utils import save_image, make_grid\n",
"import torch.nn.functional as F\n",
"import matplotlib.pyplot as plt\n",
"\n",
"def show(img):\n",
" npimg = img.cpu().numpy()\n",
" plt.imshow(np.transpose(npimg, (1,2,0)))\n",
"\n",
"if not os.path.exists('/content/drive/MyDrive/icl_dl_cw2/CW_VAE/'):\n",
" os.makedirs('/content/drive/MyDrive/icl_dl_cw2/CW_VAE/')\n",
"\n",
"# We set a random seed to ensure that your results are reproducible.\n",
"if torch.cuda.is_available():\n",
" torch.backends.cudnn.deterministic = True\n",
"torch.manual_seed(0)\n",
"\n",
"GPU = True # Choose whether to use GPU\n",
"if GPU:\n",
" device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"else:\n",
" device = torch.device(\"cpu\")\n",
"print(f'Using {device}')"
],
"execution_count": 17,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Using cuda\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "hqT7sdGzJtLy"
},
"source": [
"---\n",
"## Part 1.1a: Implement VAE (25 Points)\n",
"###Hyper-parameter selection\n"
]
},
{
"cell_type": "code",
"metadata": {
"id": "ZVPM6pgqJtLz"
},
"source": [
"# Necessary Hyperparameters \n",
"num_epochs = 25\n",
"learning_rate = 0.001\n",
"batch_size = 128\n",
"latent_dim = 16 # Choose a value for the size of the latent space\n",
"\n",
"# Additional Hyperparameters \n",
"conv1_c = 32\n",
"conv2_c = 64\n",
"leaky_relu = 0.2\n",
"beta = 2.5\n",
"\n",
"# (Optionally) Modify transformations on input\n",
"transform = transforms.Compose([\n",
" transforms.ToTensor(),\n",
"])\n",
"\n",
"# (Optionally) Modify the network's output for visualizing your images\n",
"def denorm(x):\n",
" return x"
],
"execution_count": 18,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "iN5aL7sdJtL2"
},
"source": [
"### Data loading\n"
]
},
{
"cell_type": "code",
"metadata": {
"id": "VOKdAZa2JtL3",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 427,
"referenced_widgets": [
"f5b45a4d4cd040cfb5b5c9b0034f2879",
"51b95cf998e7427c9ad7b96d3e6bd1ee",
"5e08e45a0cba4dd0b8ce21959c673933",
"c7ccdfb21c14421c9316a8b7aee926e8",
"7d98adb1baa54d50bed2fd291b3b61d7",
"271afb66e8984130bd29bc0352162918",
"03f0a5d734424182a9561f1b0894e722",
"08ef0e4e350348f8a51dcd641b261c48",
"7d3df8e5a7e24ba68a159d8e42b3f087",
"dd38e332f62442e3974aed66affbbae7",
"2f3367c024ed4e2a944ff8c6f416e82f",
"56fa7685cee643a18df229f4128509c0",
"23868dc9379a4f6cb90d67364d00172f",
"3801c55d57344820952ad82035e6127c",
"2d1279a7f86d4ce297375f262036fb33",
"f0059cf109384a259c024d4dc560428f",
"977a64a542a940edbd8a711e62a8896d",
"eee9e7dba76249169ec3d54a1eb0ec9a",
"71b3dc664d5348f6bc121ff33194074c",
"b59b7d5861194d3284c183880fb38983",
"d41970da65bd471fbf1fdbfe05eb4b31",
"440eb9e1709c482dadb1936c7233d2b4",
"12bba14540ac4f0494af76bc3ce8409c",
"836b8f0466fd4e26b3791248dee03bf0",
"800e985bb7db4f7f9368d8464b6b1dfc",
"1f5492e0a5d542adb296b39d078c0e7e",
"88bd758103cd47128998a9b430df1c05",
"78dc4f4d1fef45d49f90acadf971f393",
"89854090e47c4da8baf33730638b211f",
"33abc99f932c4879a5701637e7f1ded4",
"a895cb2fe4ab484d8982586a52062fe6",
"d55fea12d6bd48efaad78efd0dadf7ed"
]
},
"outputId": "3e27eb0b-6b97-418a-ba1b-55ff88d477ab"
},
"source": [
"train_dat = datasets.MNIST(\n",
" \"data/\", train=True, download=True, transform=transform\n",
")\n",
"test_dat = datasets.MNIST(\"data/\", train=False, transform=transform)\n",
"\n",
"loader_train = DataLoader(train_dat, batch_size, shuffle=True)\n",
"loader_test = DataLoader(test_dat, batch_size, shuffle=False)\n",
"\n",
"# Don't change \n",
"sample_inputs, _ = next(iter(loader_test))\n",
"fixed_input = sample_inputs[:32, :, :, :]\n",
"save_image(fixed_input, '/content/drive/MyDrive/icl_dl_cw2/CW_VAE/image_original.png')"
],
"execution_count": 19,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "LiQDXD24JtL7"
},
"source": [
"### Model Definition\n",
"\n",
"<figure>\n",
" <img src=\"https://blog.bayeslabs.co/assets/img/vae-gaussian.png\" style=\"width:60%\">\n",
" <figcaption>\n",
" Fig.1 - VAE Diagram (with a Guassian prior), taken from <a href=\"https://blog.bayeslabs.co/2019/06/04/All-you-need-to-know-about-Vae.html\">1</a>.\n",
" </figcaption>\n",
"</figure>\n",
"\n",
"\n",
"You will need to define:\n",
"* The hyperparameters\n",
"* The constructor\n",
"* encode\n",
"* reparametrize\n",
"* decode\n",
"* forward\n",
"\n",
"\n",
"\n",
"Hints:\n",
"- It is common practice to encode the log of the variance, rather than the variance\n",
"- You might try using BatchNorm"
]
},
{
"cell_type": "code",
"metadata": {
"id": "wDlll3BUJtL8",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "4ef5c6a7-48af-4db9-833d-72a9775662f8"
},
"source": [
"# *CODE FOR PART 1.1a IN THIS CELL*\n",
"\n",
"class VAE(nn.Module):\n",
" def __init__(self, latent_dim):\n",
" super(VAE, self).__init__()\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" self.conv1 = nn.Sequential( nn.Conv2d(1, conv1_c, kernel_size = 4, stride = 2, padding = 1),\n",
" nn.BatchNorm2d(conv1_c),\n",
" nn.LeakyReLU(leaky_relu,inplace=True))\n",
"\n",
" self.conv2 = nn.Sequential( nn.Conv2d(conv1_c, conv2_c, kernel_size = 4, stride = 2, padding = 1),\n",
" nn.BatchNorm2d(conv2_c),\n",
" nn.LeakyReLU(leaky_relu,inplace=True))\n",
"\n",
" self.conv3 = nn.Sequential( nn.Conv2d(conv2_c, conv1_c, kernel_size = 3, stride = 1, padding = 1),\n",
" nn.BatchNorm2d(conv1_c),\n",
" nn.LeakyReLU(leaky_relu,inplace=True))\n",
"\n",
" self.fcl1 = nn.Sequential(nn.Linear(conv1_c * 7 * 7, 128),\n",
" nn.LeakyReLU(leaky_relu,inplace=True)) \n",
"\n",
" self.fc12_1 = nn.Linear(128, latent_dim)\n",
" self.fc12_2 = nn.Linear(128, latent_dim)\n",
"\n",
"\n",
" self.fcld1 = nn.Sequential(nn.Linear(latent_dim, 128)) \n",
" self.fcld2 = nn.Sequential(nn.Linear(128, conv1_c * 7 * 7),\n",
" nn.ReLU()) \n",
"\n",
" # self.dropoutd = nn.Dropout(p = dropout)\n",
"\n",
" # self.deconv1 = nn.Sequential(nn.ConvTranspose2d(conv1_c, conv2_c, kernel_size=3, stride=1, padding = 1),\n",
" # nn.BatchNorm2d(conv2_c),\n",
" # nn.ReLU())\n",
" self.deconv2 = nn.Sequential(nn.ConvTranspose2d(conv1_c, conv1_c, kernel_size=4, stride=2, padding = 1),\n",
" nn.BatchNorm2d(conv1_c),\n",
" nn.ReLU())\n",
"\n",
" self.deconv3 = nn.Sequential(nn.ConvTranspose2d(conv1_c, 1, kernel_size=4, stride=2, padding = 1),\n",
" nn.Sigmoid())\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" def encode(self, x):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" out = self.conv1(x)\n",
" out = self.conv2(out)\n",
" out = self.conv3(out)\n",
" out = self.fcl1(out.view(out.size(0),-1))\n",
" return self.fc12_1(out),self.fc12_2(out)\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" def reparametrize(self, mu, logvar):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" std = torch.exp(logvar / 2)\n",
" epsilon = torch.randn_like(std)\n",
"\n",
" return mu + epsilon * std\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
"\n",
" def decode(self, z):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" out = self.fcld1(z)\n",
" out = self.fcld2(out)\n",
" # print(out.shape)\n",
" out = out.view(z.size(0), conv1_c, 7, 7)\n",
" # out = self.deconv1(out)\n",
" out = self.deconv2(out)\n",
" out = self.deconv3(out)\n",
" # out = self.deconv3(out)\n",
" return out\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" def forward(self, x):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" mu, logvar = self.encode(x)\n",
" out = self.reparametrize(mu, logvar)\n",
" out = self.decode(out)\n",
" return out, mu, logvar\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
"\n",
"model = VAE(latent_dim).to(device)\n",
"params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
"print(\"Total number of parameters is: {}\".format(params))\n",
"print(model)\n",
"# optimizer\n",
"optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)"
],
"execution_count": 23,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Total number of parameters is: 478497\nVAE(\n (conv1): Sequential(\n (0): Conv2d(1, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n (2): LeakyReLU(negative_slope=0.2, inplace=True)\n )\n (conv2): Sequential(\n (0): Conv2d(32, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n (2): LeakyReLU(negative_slope=0.2, inplace=True)\n )\n (conv3): Sequential(\n (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n (2): LeakyReLU(negative_slope=0.2, inplace=True)\n )\n (fcl1): Sequential(\n (0): Linear(in_features=1568, out_features=128, bias=True)\n (1): LeakyReLU(negative_slope=0.2, inplace=True)\n )\n (fc12_1): Linear(in_features=128, out_features=16, bias=True)\n (fc12_2): Linear(in_features=128, out_features=16, bias=True)\n (fcld1): Sequential(\n (0): Linear(in_features=16, out_features=128, bias=True)\n )\n (fcld2): Sequential(\n (0): Linear(in_features=128, out_features=1568, bias=True)\n (1): ReLU()\n )\n (deconv2): Sequential(\n (0): ConvTranspose2d(32, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n (2): ReLU()\n )\n (deconv3): Sequential(\n (0): ConvTranspose2d(32, 1, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))\n (1): Sigmoid()\n )\n)\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "aeSX6RZhJtMB"
},
"source": [
"--- \n",
"\n",
"## Part 1.1b: Training the Model (5 Points)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "JN-Pc0mvq-7_"
},
"source": [
"### Defining a Loss\n",
"Recall the Beta VAE loss, with an encoder $q$ and decoder $p$:\n",
"$$ \\mathcal{L}=\\mathbb{E}_{q_\\phi(z \\mid X)}[\\log p_\\theta(X \\mid z)]-\\beta D_{K L}[q_\\phi(z \\mid X) \\| p_\\theta(z)]$$\n",
"\n",
"In order to implement this loss you will need to think carefully about your model's outputs and the choice of prior.\n",
"\n",
"There are multiple accepted solutions. Explain your design choices based on the assumptions you make regarding the distribution of your data.\n",
"\n",
"* Hint: this refers to the log likelihood as mentioned in the tutorial. Make sure these assumptions reflect on the values of your input data, i.e. depending on your choice you might need to do a simple preprocessing step.\n",
"\n",
"* You are encouraged to experiment with the weighting coefficient $\\beta$ and observe how it affects your training"
]
},
{
"cell_type": "code",
"metadata": {
"id": "F6CeeS9CJtMC",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "67b756e3-644f-4bdd-f01a-5622a8b38e19"
},
"source": [
"# *CODE FOR PART 1.1b IN THIS CELL*\n",
"\n",
"def loss_function_VAE(recon_x, x, mu, logvar, beta):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" bce = F.binary_cross_entropy(recon_x, x, reduction='sum') / batch_size\n",
" kld = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp()) / batch_size\n",
"\n",
" return bce, kld * beta\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
"\n",
"model.train()\n",
"#######################################################################\n",
"# ** START OF YOUR CODE **\n",
"#######################################################################\n",
"total_loss_train = []\n",
"kl_loss_train = []\n",
"recon_loss_train = []\n",
"\n",
"total_loss_test = []\n",
"kl_loss_test = []\n",
"recon_loss_test = []\n",
"#######################################################################\n",
"# ** END OF YOUR CODE **\n",
"####################################################################### \n",
"\n",
"for epoch in range(num_epochs): \n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" model.train()\n",
" total_loss_train_epoch = 0\n",
" kl_loss_train_epoch = 0\n",
" recon_loss_train_epoch = 0\n",
"\n",
" for batch_idx, (data, _) in enumerate(loader_train):\n",
" data = data.to(device)\n",
" model.zero_grad()\n",
"\n",
"\n",
" recon_x, mu, logvar = model(data)\n",
" recon_loss, kl_loss = loss_function_VAE(recon_x, data, mu, logvar, beta)\n",
" total_loss = recon_loss + kl_loss\n",
" total_loss_train_epoch += total_loss.item()\n",
" kl_loss_train_epoch += kl_loss.item() / beta\n",
" recon_loss_train_epoch += recon_loss.item()\n",
"\n",
"\n",
" total_loss.backward()\n",
" optimizer.step()\n",
"\n",
" total_loss_train.append(total_loss_train_epoch / len(loader_train.dataset))\n",
" kl_loss_train.append(kl_loss_train_epoch / len(loader_train.dataset))\n",
" recon_loss_train.append(recon_loss_train_epoch / len(loader_train.dataset))\n",
"\n",
" print('epoch [{}/{}], train loss:{:.4f}'.format(epoch + 1, num_epochs, total_loss_train_epoch / len(loader_train.dataset)))\n",
" \n",
" model.eval()\n",
" total_loss_test_epoch = 0\n",
" kl_loss_test_epoch = 0\n",
" recon_loss_test_epoch = 0\n",
"\n",
" with torch.no_grad():\n",
" for batch_idx, (data, _) in enumerate(loader_test):\n",
" data = data.to(device)\n",
" recon_x, mu, logvar = model(data)\n",
" recon_loss, kl_loss = loss_function_VAE(recon_x, data, mu, logvar, beta)\n",
" total_loss = recon_loss + kl_loss\n",
" total_loss_test_epoch += total_loss.item()\n",
" kl_loss_test_epoch += kl_loss.item() / beta\n",
" recon_loss_test_epoch += recon_loss.item()\n",
" \n",
" total_loss_test.append(total_loss_test_epoch / len(loader_test.dataset))\n",
" kl_loss_test.append(kl_loss_test_epoch / len(loader_test.dataset))\n",
" recon_loss_test.append(recon_loss_test_epoch / len(loader_test.dataset))\n",
"\n",
" print('epoch [{}/{}], test loss:{:.4f}'.format(epoch + 1, num_epochs, total_loss_test_epoch / len(loader_test.dataset)))\n",
"\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" # save the model\n",
" if epoch == num_epochs - 1:\n",
" with torch.no_grad():\n",
" torch.jit.save(torch.jit.trace(model, (data), check_trace=False),\n",
" '/content/drive/MyDrive/icl_dl_cw2/CW_VAE/VAE_model.pth')\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"epoch [1/25], train loss:1.3808\n",
"epoch [1/25], test loss:1.0844\n",
"epoch [2/25], train loss:1.0679\n",
"epoch [2/25], test loss:1.0416\n",
"epoch [3/25], train loss:1.0409\n",
"epoch [3/25], test loss:1.0301\n",
"epoch [4/25], train loss:1.0276\n",
"epoch [4/25], test loss:1.0139\n",
"epoch [5/25], train loss:1.0168\n",
"epoch [5/25], test loss:1.0078\n",
"epoch [6/25], train loss:1.0119\n",
"epoch [6/25], test loss:1.0026\n",
"epoch [7/25], train loss:1.0069\n",
"epoch [7/25], test loss:1.0009\n",
"epoch [8/25], train loss:1.0021\n",
"epoch [8/25], test loss:0.9941\n",
"epoch [9/25], train loss:0.9988\n",
"epoch [9/25], test loss:0.9944\n",
"epoch [10/25], train loss:0.9959\n",
"epoch [10/25], test loss:0.9893\n",
"epoch [11/25], train loss:0.9930\n",
"epoch [11/25], test loss:0.9868\n",
"epoch [12/25], train loss:0.9911\n",
"epoch [12/25], test loss:0.9838\n",
"epoch [13/25], train loss:0.9884\n",
"epoch [13/25], test loss:0.9837\n",
"epoch [14/25], train loss:0.9867\n",
"epoch [14/25], test loss:0.9795\n",
"epoch [15/25], train loss:0.9853\n",
"epoch [15/25], test loss:0.9780\n",
"epoch [16/25], train loss:0.9832\n",
"epoch [16/25], test loss:0.9811\n",
"epoch [17/25], train loss:0.9811\n",
"epoch [17/25], test loss:0.9755\n",
"epoch [18/25], train loss:0.9801\n",
"epoch [18/25], test loss:0.9771\n",
"epoch [19/25], train loss:0.9793\n",
"epoch [19/25], test loss:0.9735\n",
"epoch [20/25], train loss:0.9775\n",
"epoch [20/25], test loss:0.9727\n",
"epoch [21/25], train loss:0.9767\n",
"epoch [21/25], test loss:0.9729\n",
"epoch [22/25], train loss:0.9754\n",
"epoch [22/25], test loss:0.9726\n",
"epoch [23/25], train loss:0.9748\n",
"epoch [23/25], test loss:0.9707\n",
"epoch [24/25], train loss:0.9736\n",
"epoch [24/25], test loss:0.9680\n",
"epoch [25/25], train loss:0.9727\n",
"epoch [25/25], test loss:0.9697\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "vF6B26_oJtMF"
},
"source": [
"### Loss Explanation\n",
"Explain your choice of loss and how this relates to:\n",
"\n",
"* The VAE Prior\n",
"* The output data domain\n",
"* Disentanglement in the latent space\n"
]
},
{
"cell_type": "code",
"metadata": {
"id": "DUqWwUvlrYnH"
},
"source": [
"# Any code for your explanation here"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "dhjE07mrB7Zs"
},
"source": [
"**YOUR ANSWER**\n",
"\n",
"The VAE loss function have two terms: one is aimed to maximises the reconstruction likelihood, and the other is designed to make the approximation of the posterior $q_\\phi(z \\mid X) $ becomes closer to the prior distribution $ p_θ(z) $.\n",
"\n",
"1. As for $D_{K L}[q_\\phi(z \\mid X) \\| p_\\theta(z)]$, we assume that:\n",
"\n",
"* $ p_θ(z) \\sim N\\left(0, I\\right)$, so $ p_θ(z) $ has no parameter, so it can be writen as $ p(z) $.\n",
"* $q_\\phi(z \\mid X) \\sim N\\left(\\mu, \\Sigma ; x^{(i)}\\right)$\n",
"\n",
"Then, we can get:\n",
"\n",
"$$\\begin{aligned} & D_{K L}\\left(q_{\\phi}\\left(z \\mid x^{(i)}\\right)_{d} \\| p_{\\theta}(z)_{d}\\right) \\\\=& K L\\left(N\\left(\\mu_{d}, \\sigma_{d}^{2}\\right) \\| N(0,1)\\right) \\\\=& \\frac{1}{2}\\left(-\\log \\sigma_{d}^{2}+\\mu_{d}^{2}+\\sigma_{d}^{2}-1\\right) \\end{aligned}$$\n",
"\n",
"So, the python code is `kld = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())`\n",
"\n",
"2. As for $\\mathbb{E}_{q_\\phi(z \\mid X)}[\\log p_\\theta(X \\mid z)]$, we know $\\mathbb{E}_{z}\\left[\\log p_{\\theta}\\left(x^{(i)} \\mid z\\right)\\right] \\approx \\log p_{\\theta}\\left(x^{(i)} \\mid z\\right)$, and we assume $p_{\\theta}(x \\mid z) \\sim$ Bernoulli distribution, which corresponds to a binary value $X$ and a vector with $Q$ independent dimensions $\\left[\\rho_{1}, \\rho_{2}, \\ldots, \\rho_{Q}\\right]$. Then we can get $\\rho(z)=\\operatorname{dec}_{\\theta}(z)$. Now, we can calculate the reconstruction likelihood $\\log p_{\\theta}\\left(x^{(i)} \\mid z\\right)=\\sum_{q=1}^{Q}\\left(x_{q}^{(i)} \\log \\left[\\rho_{q}(z)\\right]+\\left(1-x_{q}^{(i)}\\right) \\log \\left[1-\\rho_{q}(z)\\right]\\right)$.\n",
"\n",
"So, we designed the sigmoid as activation function for the last layer, and use binary cross entropy as loss function. `bce = F.binary_cross_entropy(recon_x, x, reduction='sum') / batch_size`\n",
"\n",
"3. As for $\\beta$, it will reduce information of $z$, however improve the ability of disentanglement."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "ez5nlMi1JtMF"
},
"source": [
"<h2>Part 1.2 (9 points)</h2>\n",
"\n",
"a. Plot your loss curves\n",
"\n",
"b. Show reconstructions and samples\n",
"\n",
"c. Discuss your results from parts (a) and (b)\n",
"\n",
"## Part 1.2a: Loss Curves (3 Points)\n",
"Plot your loss curves (6 in total, 3 for the training set and 3 for the test set): total loss, reconstruction log likelihood loss, KL loss (x-axis: epochs, y-axis: loss). If you experimented with different values of $\\beta$, you may wish to display multiple plots (worth 1 point)."
]
},
{
"cell_type": "code",
"metadata": {
"id": "fNA778FdhGFo"
},
"source": [
"# *CODE FOR PART 1.2a IN THIS CELL*\r\n",
"\r\n",
"# before running this code, we should make sure beta == 2.5\r\n",
"if beta == 2.5:\r\n",
" total_loss_train_beta25 = total_loss_train\r\n",
" total_loss_test_beta25 = total_loss_test\r\n",
" recon_loss_train_beta25 = recon_loss_train\r\n",
" recon_loss_test_beta25 = recon_loss_test\r\n",
" kl_loss_train_beta25 = kl_loss_train\r\n",
" kl_loss_test_beta25 = kl_loss_test"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "C0zWo_cMiq-j"
},
"source": [
"if beta == 1:\r\n",
" total_loss_train_beta1 = total_loss_train\r\n",
" total_loss_test_beta1 = total_loss_test\r\n",
" recon_loss_train_beta1 = recon_loss_train\r\n",
" recon_loss_test_beta1 = recon_loss_test\r\n",
" kl_loss_train_beta1 = kl_loss_train\r\n",
" kl_loss_test_beta1 = kl_loss_test"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "A4UJqR3qkd-c"
},
"source": [
"if beta == 4:\r\n",
" total_loss_train_beta4 = total_loss_train\r\n",
" total_loss_test_beta4 = total_loss_test\r\n",
" recon_loss_train_beta4 = recon_loss_train\r\n",
" recon_loss_test_beta4 = recon_loss_test\r\n",
" kl_loss_train_beta4 = kl_loss_train\r\n",
" kl_loss_test_beta4 = kl_loss_test"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "AADYspqtJtMG",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 896
},
"outputId": "c44c557c-3922-47fe-a3b4-a3dd9b638049"
},
"source": [
"\r\n",
"\r\n",
"fig, axs = plt.subplots(3, 1, figsize=(15,15))\r\n",
"axs[0].plot(list(range(1, 1 + len(total_loss_train_beta25))), total_loss_train_beta25)\r\n",
"axs[0].plot(list(range(1, 1 + len(total_loss_test_beta25))), total_loss_test_beta25)\r\n",
"axs[0].plot(list(range(1, 1 + len(total_loss_train_beta1))), total_loss_train_beta1)\r\n",
"axs[0].plot(list(range(1, 1 + len(total_loss_test_beta1))), total_loss_test_beta1)\r\n",
"axs[0].plot(list(range(1, 1 + len(total_loss_train_beta4))), total_loss_train_beta4)\r\n",
"axs[0].plot(list(range(1, 1 + len(total_loss_test_beta4))), total_loss_test_beta4)\r\n",
"axs[0].legend(['Train loss(beta=2.5)', 'Test loss(beta=2.5)','Train loss(beta=1)', 'Test loss(beta=1)','Train loss(beta=4)', 'Test loss(beta=4)'])\r\n",
"axs[0].set_title('Total Loss')\r\n",
"\r\n",
"axs[1].plot(list(range(1, 1 + len(recon_loss_train_beta25))), recon_loss_train_beta25)\r\n",
"axs[1].plot(list(range(1, 1 + len(recon_loss_test_beta25))), recon_loss_test_beta25)\r\n",
"axs[1].plot(list(range(1, 1 + len(recon_loss_train_beta1))), recon_loss_train_beta1)\r\n",
"axs[1].plot(list(range(1, 1 + len(recon_loss_test_beta1))), recon_loss_test_beta1)\r\n",
"axs[1].plot(list(range(1, 1 + len(recon_loss_train_beta4))), recon_loss_train_beta4)\r\n",
"axs[1].plot(list(range(1, 1 + len(recon_loss_test_beta4))), recon_loss_test_beta4)\r\n",
"axs[1].legend(['Train loss(beta=2.5)', 'Test loss(beta=2.5)','Train loss(beta=1)', 'Test loss(beta=1)','Train loss(beta=4)', 'Test loss(beta=4)'])\r\n",
"axs[1].set_title('Log Likelihood Loss')\r\n",
"\r\n",
"axs[2].plot(list(range(1, 1 + len(kl_loss_train_beta25))), kl_loss_train_beta25)\r\n",
"axs[2].plot(list(range(1, 1 + len(kl_loss_test_beta25))), kl_loss_test_beta25)\r\n",
"axs[2].plot(list(range(1, 1 + len(kl_loss_train_beta1))), kl_loss_train_beta1)\r\n",
"axs[2].plot(list(range(1, 1 + len(kl_loss_test_beta1))), kl_loss_test_beta1)\r\n",
"axs[2].plot(list(range(1, 1 + len(kl_loss_train_beta4))), kl_loss_train_beta4)\r\n",
"axs[2].plot(list(range(1, 1 + len(kl_loss_test_beta4))), kl_loss_test_beta4)\r\n",
"axs[2].legend(['Train loss(beta=2.5)', 'Test loss(beta=2.5)','Train loss(beta=1)', 'Test loss(beta=1)','Train loss(beta=4)', 'Test loss(beta=4)'])\r\n",
"axs[2].set_title('KL Divergence Loss')"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"Text(0.5, 1.0, 'KL Divergence Loss')"
]
},
"metadata": {
"tags": []
},
"execution_count": 23
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA3AAAANeCAYAAABTTOyRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdeXhUVb7v//eqqqQqE0lIgpnAgEIIQ0ggJswQEWSINtCIINogcrhwrgRoJ1SwgYbT0M0Bjni90V8b+Im2Yosog1y5tokQ4AghRAghDAYMIYAkkHmsqn3/SKUgEIaQEf2+nqeeXXtYa393kacfP732XltpmoYQQgghhBBCiNZP19IFCCGEEEIIIYS4OxLghBBCCCGEEOI+IQFOCCGEEEIIIe4TEuCEEEIIIYQQ4j4hAU4IIYQQQggh7hMS4IQQQgghhBDiPiEBTgghhACUUppS6uGWrkMIIYS4HQlwQgghWjWlVPF1H6tSquy69Sm3aDNUKZXdiDUkKqVmNFZ/QgghxL0ytHQBQgghxO1omuZa810pdRaYoWnaty1XkRBCCNFyZAROCCHEfUkpZVRKrVVK5dg+a23bXICdgP91I3X+SqlIpdR+pVS+UuqCUuodpZRjA2vQKaUWKqV+Vkr9opT6UCnlbttnUkp9pJTKs53zoFLqAdu+aUqpTKVUkVLqzK1GEoUQQogbSYATQghxv3oT6AuEAb2ASGChpmklwCggR9M0V9snB7AA8wFvoB8wDPj3BtYwzfaJBjoBrsA7tn1TAXegPeAFzALKbAHzbWCUpmluQH8gtYF1CCGE+I2QACeEEOJ+NQVYqmnaL5qmXQaWAM/d6mBN0w5pmvbfmqaZNU07C7wHDGmEGlZrmpapaVox8DowSSllAKqoDm4Pa5pmsZ2/0NbOCvRQSjlpmnZB07RjDaxDCCHEb4QEOCGEEPcrf+Dn69Z/tm2rk1Kqi1Jqu1LqolKqEPgPqkfjGrsGA/AAsBH4BvjUdovnX5VSDrYRwqepHpG7oJTaoZTq2sA6hBBC/EZIgBNCCHG/ygEevG69g20bgFbH8f8byAA6a5rWBngDUE1Qgxm4pGlalaZpSzRN60b1bZIxwB8ANE37RtO04YCfrab/r4F1CCGE+I2QACeEEOJ+9QmwUCnlo5TyBt4CPrLtuwR41UwoYuMGFALFthGv2fU8n8E2MUnNx8FWw3ylVEellCvVo3qbNE0zK6WilVI9lVJ623mrAKtS6gGl1O9sz8JVAMVU31IphBBC3JEEOCGEEPerZUAycAQ4CqTYtqFpWgbV4SrTNgOkP/Ay8AxQRPWI16Z6nu9/A2XXfdYD8VTfKrkbOAOUA3Nsx/sCn1Md3o4D39uO1QF/pHr07grVz+HVN0wKIYT4jVKaVtddJkIIIYQQQgghWhsZgRNCCCGEEEKI+4QEOCGEEEIIIYS4T0iAE0IIIYQQQoj7hAQ4IYQQQgghhLhPGFq6gBt5e3trQUFBLV2GEEIIIYQQQrSIQ4cO5Wqa5lPXvlYX4IKCgkhOTm7pMoQQQgghhBCiRSilfr7VPrmFUgghhBBCCCHuExLghBBCCCGEEOI+IQFOCCGEEEIIIe4Tre4ZOCGEEEIIIX4tqqqqyM7Opry8vKVLEa2QyWQiMDAQBweHu24jAU4IIYQQQogmkp2djZubG0FBQSilWroc0YpomkZeXh7Z2dl07NjxrtvJLZRCCCGEEEI0kfLycry8vCS8iZsopfDy8qr36KwEOCGEEEIIIZqQhDdxK/fytyEBTgghhBBCCCHuExLg7kJJfgUHtmVSVWFp6VKEEEIIIYQQv2ES4O5CQW4ZB3ec5VTypZYuRQghhBBCiLuWl5dHWFgYYWFh+Pr6EhAQYF+vrKy8bdvk5GRiY2Prdb6goCByc3MbUnIta9eu5cMPPwRg6NChJCcn33Xb1NRUvv766wbXUFpaypgxY+jatSvdu3dnwYIFdR539uxZnJyc7L/vrFmz7Psee+wxrl692uBaQGahvCt+D7nj6etMelIO3Qb4t3Q5QgghhBBC3BUvLy9SU1MBWLx4Ma6urrz88sv2/WazGYOh7kgQERFBREREs9RZF7PZTHx8PCkpKffUPjU1leTkZEaPHt3gWl5++WWio6OprKxk2LBh7Ny5k1GjRt103EMPPWT/va/33HPP8e677/Lmm282uBYJcHdBKUW3gf7s/fw0eeeL8QpwbemShBBCCCHEfWbJtmOk5xQ2ap/d/Nvwpye616vNtGnTMJlMHD58mAEDBjBp0iTmzp1LeXk5Tk5OrF+/nuDgYBITE1m1ahXbt29n8eLFZGVlkZmZSVZWFvPmzbvj6Nzq1auJj48HYMaMGcybN4+SkhImTpxIdnY2FouFRYsW8fTTT7NgwQK2bt2KwWBgxIgRrFq1iu+++47evXvXCpgbN25kxowZ9nAXGRlJSUkJc+bMIS0tjaqqKhYvXsyoUaN46623KCsrIykpiddff52OHTvWeZ134uzsTHR0NACOjo707t2b7Ozsev3mTz75JIMGDZIA15yC+/qy/8ufOJaUw+Cnu7R0OUIIIYQQQtyz7Oxs9u3bh16vp7CwkD179mAwGPj2229544032Lx5801tMjIySEhIoKioiODgYGbPnn3LF1AfOnSI9evX88MPP6BpGlFRUQwZMoTMzEz8/f3ZsWMHAAUFBeTl5bFlyxYyMjJQSpGfnw/A3r176dOnT61+S0tLSU1NZffu3UyfPp20tDSWL1/Oo48+Snx8PPn5+URGRvLYY4+xdOlSkpOTeeeddwBueZ0nTpzg6aefrvM6EhMT8fDwsK/n5+ezbds25s6dW+fxZ86cITw8nDZt2rBs2TIGDRoEgKenJxUVFeTl5eHl5XW7f5o7kgB3l5xcHXkovB0nf7hI/3EPYXDUt3RJQgghhBDiPlLfkbKm9NRTT6HXV//3bEFBAVOnTuXUqVMopaiqqqqzzZgxYzAajRiNRtq1a8elS5cIDAys89ikpCTGjRuHi4sLAOPHj2fPnj2MHDmSl156iddee42YmBgGDRqE2WzGZDLxwgsvEBMTQ0xMDAAXLlwgJCSkVr+TJ08GYPDgwRQWFpKfn8+uXbvYunUrq1atAqrfvZeVlXVTTbe6zuDg4Dpve7yR2Wxm8uTJxMbG0qlTp5v2+/n5kZWVhZeXF4cOHWLs2LEcO3aMNm3aANCuXTtycnIaHOBkEpN66DbQn4pSMz+l/NLSpQghhBBCCHHPaoIVwKJFi4iOjiYtLY1t27bd8sXSRqPR/l2v12M2m+t93i5dupCSkkLPnj1ZuHAhS5cuxWAwcODAASZMmMD27dsZOXIkAE5OTjfVcuN705RSaJrG5s2bSU1NJTU1laysrJuC3+2u88SJE/aJR2781IwGAsycOZPOnTszb968W/4+NeGsT58+PPTQQ5w8edK+v+bWzYaSAFcPAV08cPdx4lhSTkuXIoQQQgghRKMoKCggICAAgA0bNjRKn4MGDeLLL7+ktLSUkpIStmzZw
qBBg8jJycHZ2Zlnn32WV155hZSUFIqLiykoKGD06NGsWbOGH3/8EYCQkBBOnz5dq99NmzYB1SN87u7uuLu78/jjj7Nu3To0TQPg8OHDALi5uVFUVHTH66wZgavrU3P75MKFCykoKGDt2rW3vObLly9jsVS/diwzM5NTp07ZR+o0TePixYsEBQXd609qJwGuHmomM7lwuoCrF0tauhwhhBBCCCEa7NVXX+X1118nPDz8nkbV6tK7d2+mTZtGZGQkUVFRzJgxg/DwcI4ePUpkZCRhYWEsWbKEhQsXUlRURExMDKGhoQwcOJDVq1cDMGrUKHbv3l2rX5PJRHh4OLNmzeKDDz4AqkfWqqqqCA0NpXv37ixatAiA6Oho0tPTCQsLY9OmTfd8ndnZ2Sxfvpz09HR69+5NWFgYf//73wHYunUrb731FgC7d+8mNDSUsLAwJkyYQFxcHG3btgWqnwns27fvLWf8rA9Vk1Rbi4iICK0+73dobqWFlfz/C/bS89FABk7o3NLlCCGEEEKIV
"text/plain": [
"<Figure size 1080x1080 with 3 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "B7wp4MzqsjjZ"
},
"source": [
"## Part 1.2b: Samples and Reconstructions (6 Points)\n",
"Visualize a subset of the images of the test set and their reconstructions **as well as** a few generated samples. Most of the code for this part is provided. You only need to call the forward pass of the model for the given inputs (might vary depending on your implementation)."
]
},
{
"cell_type": "code",
"metadata": {
"id": "Wu9CWtqoJtMK",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 997
},
"outputId": "a5fe3fba-e263-48ad-fe14-6004356a7b8a"
},
"source": [
"# *CODE FOR PART 1.2b IN THIS CELL*\n",
"\n",
"# load the model\n",
"print('Input images')\n",
"print('-'*50)\n",
"\n",
"sample_inputs, _ = next(iter(loader_test))\n",
"fixed_input = sample_inputs[0:32, :, :, :]\n",
"# visualize the original images of the last batch of the test set\n",
"img = make_grid(denorm(fixed_input), nrow=8, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
"plt.figure()\n",
"show(img)\n",
"\n",
"print('Reconstructed images')\n",
"print('-'*50)\n",
"with torch.no_grad():\n",
" # visualize the reconstructed images of the last batch of test set\n",
" \n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" fixed_input = fixed_input.to(device)\n",
" recon_batch, _, _ = model(fixed_input)\n",
" recon_batch = recon_batch.view(-1, 1, 28, 28)\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" recon_batch = recon_batch.cpu()\n",
" recon_batch = make_grid(denorm(recon_batch), nrow=8, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
" plt.figure()\n",
" show(recon_batch)\n",
"\n",
"print('Generated Images') \n",
"print('-'*50)\n",
"model.eval()\n",
"n_samples = 256\n",
"z = torch.randn(n_samples,latent_dim).to(device)\n",
"with torch.no_grad():\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" samples = model.decode(z).view(-1, 1, 28, 28)\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" samples = samples.cpu()\n",
" samples = make_grid(denorm(samples), nrow=16, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
" plt.figure(figsize = (8,8))\n",
" show(samples)\n",
"\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Input images\n",
"--------------------------------------------------\n",
"Reconstructed images\n",
"--------------------------------------------------\n",
"Generated Images\n",
"--------------------------------------------------\n"
],
"name": "stdout"
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADLCAYAAACVv9NEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9d3hT99n4/Tkalqxlecp7T7wZxpiwagiQUdqEDDI7s5qk79Onbdo+6a/P9eT3Xm2epn2bjqShM02aQZISkhATppneBowx2NjGxtuyLW9bsqTz/kF0CoEkBiwLiD7XlSvY50jntnTO/b2/9xREUcSLFy9evFxfyDwtgBcvXrx4mXm8yt2LFy9erkO8yt2LFy9erkO8yt2LFy9erkO8yt2LFy9erkO8yt2LFy9erkPcptwFQVgjCEK9IAiNgiD8yF3X8eLFixcvFyK4I89dEAQ50ACsAtqBCmCDKIp1M34xL168ePFyAe6y3POARlEUm0VRtAFvAOvcdC0vXrx48fIJFG563wig7Zyf24GFn3ayIAjeMlkvXrx4uXT6RFEMvtgBdyn3z0UQhIeAhzx1fS9evHi5Dmj9tAPuUu4dQNQ5P0d+/DsJURQ3AhvBa7l78eLFy0zjLp97BZAkCEKcIAg+wN3Ae266lhcvXrx4+QRusdxFUbQLgvA48BEgB/4qiuJxd1zLixcvXrxciFtSIS9ZCK9bxosXL14uhypRFOdf7IDHAqpfRHx9fREEAUEQUCgU+Pj4IJOd9YxNTExgt9txOBxYrVYPS+rFi5drHa9yn0U2bNhAUFAQvr6+pKamsmTJEkJCQnA6nbzyyiucPHmS+vp6PvjgA0+L6sWLl2scr3KfBSIjI7n//vv5yle+gsFgQCaTodPpCAgIQC6XI5PJuPHGG8nMzOTYsWOUlZVhsViw2+2eFv0CBEEgLCyMhx9+mIiICLZs2cLOnTuZmJjwmEwqlYrCwkLmzZvHwoULaWlpYc+ePRQXF9Pf3+8xubx48SRe5e5mYmNjmTt3LmvWrCEjIwO1Wo3T6WR8fByz2YzD4UCtVhMeHo5erwcgISGBmpqaq1K5y2QyQkNDyc/PJyYmhtLSUuRyucfkEQQBrVbL/PnzWb16NQsXLqS1tZWRkRFOnTrlUeUul8vR6XRkZGTQ3t5Oa+unpiRLGI1GRFFkcnLyqnHP+fr64u/vT1ZWFu3t7ZjNZnp6embt+nK5HL1eT2pqKkNDQwwODmKxWJicnJz2eygUCrRaLXK5HIvFwtUQa3Q33q6QbmbdunV85zvfYfHixahUKhwOBzabjdbWVnbu3ElRURGlpaVMTU1hMBiIiYnhS1/6kqTorzZkMhnx8fFotVpsNht9fX04HA6PyhMUFEReXh55eXmIokhMTAyZmZnk5OR4TC4AjUZDUlISv/3tb/nyl788rdekp6eTkpJCcPBFiw49Qnh4OIWFhWzZsoUnn3ySRYsWzeqCrtPpSE9P5xe/+AUPPfQQy5YtIygoaNqvFwQBvV5PWloaWVlZKBSzY9PKZDLpP0/gtdzdhEwmIz09nWXLlpGfnw/A6dOn+de//sUbb7xBf38/IyMjOJ1ONBoNmzZtIiUlBblcjtFo9Kg1/FnI5XLmzJmDWq2mq6uLoqIij1mYKpUKk8nEL3/5S48r8othNBpZtmwZOp1u2t9nQUEB0dHR2O12vv/973t04QQwGAysXLmS//iP/0ChUHDPPfdgMpnYtm3brMhmMBj4r//6L26//XZMJhPDw8NUV1fT2dk5rdcrFAr8/f3ZtGkTYWFhnDlzhkceeYS2tjampqbcIrNcLker1fLkk08SFxeH1WrlySefnPWd+DWn3NPS0khISCAkJITR0VHsdjv9/f20tLQwNjbGyMjIVbGdFQSBgIAAtFotAD09PfzjH/9g//79NDY2Mjk5id1uJygoiOzsbIKDg1Gr1QwNDVFfX39V/A2fxNfXl7CwMJYuXcro6CgnT570mJy5ubnExsaSkpJCZmYmBoPhvONGo5GkpCQWL15MX18fFouF3t7eWZNPEAQMBgNZWVn4+PhM+3UDAwPExcURHR2NSqViYmLCoy6E6OhoYmNjCQ8PB6C7u5vOzk63KyqXgrz33ntZuHAher2eqqoq3nnnHaqqqnA6ndN6n7CwMBYvXkxKSgoNDQ1UVFQwMDDgtoUpPj6epKQkCgoKWLVqFQEBAfT29qJWqxkfH5+23DPBNaXcZTIZeXl5LF68mKSkJPr7+5mamqK1tZXDhw9jNpvp7u5meHh4Wu/ndDqx2+0MDQ1hs9nc8hD19fXR1NREW1sb77zzDq2trYyNjUnHg4KCWLFiBSaTCV9fX/r6+mhubr4qlbtWqyUyMpK5c+eyZ88e6uo818G5oKCA+fPnk56eTkRExAVb3+DgYNLT0xFFkebmZk6dOsXQ0NCsfa5qtZrAwEDmzJlzSf7zgYEBnE4noaGhqFQqrFarx6x3QRBISUkhJiYGnU4HwKlTpzhx4oTblbuPjw9BQUGsX7+e5ORkrFYre/bs4YMPPph2HEWhUBATE8Pq1avx8fHh6NGjFBcXMzg46BaZFQoFWVlZrFixgttvv52QkBDkcjlKpZLIyEjOnDkzq4v1NafcH3/8cRITEy+w1ERRZGxsjPb29ml/eUNDQ7S2tvKXv/yFuro6xsfHZ0xWh8PB3r17OX78uLRqDw4OXrBym0wm1qxZg0ajQRCEGbu+O9Dr9cTGxqLRaKipqaG0tNRjsnzrW98iIyPjUx+U+Ph44uLiuOWWW+jp6aG4uJhf//rX1NTUzIqyTEpKIi8vj+zsbF566SVOnjw5rdfp9XpCQ0OJi4vD19eXsbExjyh3QRDw8fHh7rvvJi8vT/p9WVkZJSUlbr++0WgkPT2dvLw8fH19qamp4Q9/+MMlKeaoqCgKCgq49957KSoqYtu2bRw8eNAt8spkMkwmEw888MAFMbPAwEB++tOf8txzz9HQ0HCecedOrinl7nA4eP7554mNjcXPz4/29nbCw8MJDg4mIiKC9PR0QkNDMZlMWCwWAgMDz/N1Op1ObDYbY2NjBAQE4HQ6SUhIoLW1lfb29hlV7i4GBweRyWQ4HI4LFPuXvvQlVq9eTXR0NDKZjO7ubo4fP87x48cvKRNgtkhKSuKOO+7AbDbT1NQ0reyPmSYsLIwf/OAHkrV+MeU+MjKCzWbD6XQSHBxMUFAQN954IykpKRQWFjIyMuI2+WQyGbGxsXz7299m8eLFtLe38/rrr09buc+bN4+EhAS3yTddVCoVCQkJpKSkEBISgiiKOBwOTp486fYdm16vJz4+nqVLlyKXy9m6dStFRUVYLJZpLXQKhYKoqCh+8pOfUFBQgMPhYP/+/Zw5c8Ztz5VarWbDhg0kJSWh0WjOO6bRaFi1ahXx8fEcPXqUXbt28f7772O1Wt1qxV9Tyl0URcrLy2lubkaj0dDT00NwcDD+/v6EhIRw6tQpQkNDUSgUtLW1ERcXh1KplF5vt9sZHx/HYrFwxx13o
NfrsdvtqFQqt1nNF9u+ymQywsLCWLJkCQsWLECj0WCz2WhoaKC8vJzBwUGPB9I+icvfPmfOHKxWK8PDw7NmgbjQ6/VERkZK1twnGR8fp62tjdOnTzM2NoYoiiQnJ5OQkEBgYCA6nY6UlBTJReMOFAoFCxYsIDMzE51Ox65du2hqavrc6wmCgFKpJD4+HpPJ5PFdnFqtJiUlBaPRKGV5dXV10d/f7/bvPSMjg/z8fBYsWMDw8DBHjx6loqJi2q4gpVJJbGwsmZmZBAQE0NjYSHV1NWaz2S0+b6VSidFo5IYbbiAoKAin08nw8DCTk5Po9Xp0Oh2BgYFotVp0Oh1Op5Pdu3djt9vd6t66ppQ7QENDAw0NDRc95ufnR0xMDEqlkoaGBtLT01GpVNLxqakpRkdHGRgYYPny5fj4+GCz2ejq6nJb5Pxi+Pj4MHfuXG666SZycnKQy+X09fVx6NAhtm7dis1mmzVZpktgYCCRkZFER0fT1NSEzWab1c9MEAQiIyPJyMggPDwchUKBK
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADLCAYAAACVv9NEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9aYxkWXqe99zY9z0iIzIj963WrH3pqenpYm9sDRtukrPIpimNIQkDSDBgwjYoyb8MyAbkP7b1hyYGoElRoMSxxWGT0qDpmd57qruquvasyqVyjczIjMjI2DP2iIzrH9XnTGZNr5URWVU98QKFqsol4sS993znW97v/RRVVemggw466ODrBc3jXkAHHXTQQQetR8e4d9BBBx18DdEx7h100EEHX0N0jHsHHXTQwdcQHePeQQcddPA1RMe4d9BBBx18DdE2464oyiuKoswqijKvKMq/aNf7dNBBBx108KtQ2sFzVxRFC9wHXgKiwMfAf6Wq6lTL36yDDjrooINfQbs897PAvKqqi6qq1oC/BF5r03t10EEHHXTwEHRtet0eYHXH/6PAuc/6YUVROm2yHXTQQQdfHUlVVf2f9o12GfcvhKIoPwR++Ljev4MOOujga4DIZ32jXcZ9Dejd8f/wJ1+TUFX1R8CPoOO5d9BBBx20Gu3KuX8MjCqKMqgoigH4L4G/bdN7ddBBBx108BDa4rmrqtpQFOW/Bf4/QAv836qq3mvHe3XQQQcddPCraAsV8isv4tckLaMoyq/8rSgKGo2GRqNBs9l8nMvroIMOnj5cV1X19Kd947EVVH9doNFo0Gg0GAwGTCYTOp0OrVYLPDDuWq0WrVZLNpulWq1Sq9XY3t5+zKt+uiAOSPHvZrOJqqo8CY5LBx08LnSMe5ugKAp6vR6Xy0UwGOT06dN85zvfIRQKYbfbpQFvNptUKhXeeOMNrl69ytTUFEtLS2xvb3eM0xdAHI5msxmn04nZbMZgMJBMJikWi1SrVer1+uNeZgcdPBZ0jHsbIIxOKBTiyJEjHDx4kAsXLjA+Po7dbsdoNAKgqirb29tUq1UuXrxIf38/MzMz/Pt//+9ZX1+nWq0+UQZefK5gMMjo6ChOp5NCocCtW7fY2tqiVqvt63qFYR8dHWViYoKuri70ej3vv/8+q6urJBIJGo3GY72GiqJgMplQFAVVVeU9/aw16XQ6dDoder2eRqNBo9Fge3v7saXsFEWR6zGbzZRKJer1Oo1G47Gs50nFwynXJyFy7Bj3NkGj0eByuejp6WFgYIBwOIzZbEaj0ci0gdi0Wq2Wnp4erFYrVquVjz/+mGw2S71ef6JSNCIaGR4e5uTJk3i9XuLxOJFIhHq9Tr1e37cHWlEUDAYDdrud8fFxTpw4QTgcRlVVVldXKZfL5HI5KpWKvN77DZ1Oh8ViIRgMAlCr1YjH49Tr9V1rEgZBq9ViMpmwWq3YbDaq1SrZbFZ+hscBg8GA1+ulu7sbv9/PysoKm5ubpFKptj6bwpEwmUyYTCb0ej2ATFs2m02azaasVamquisd92n3W6PRyDrXp/3cozwjO+tmOp0Og8GARqNBVVWKxeJjjcA7xr0NEDfbbrfLdIEwNoqiSG+sUqmg0+lwOp3Y7XbpeX7rW99icXGRUqn0xBl3g8HA+fPnOXfuHHa7nampKbq6uigWixQKhX1di8ViIRQKceHCBc6cOUMgEKBUKrGwsEA+n2dzc5Nisbivh46AVqvFYrHQ39/PM888Q6PRIJlMks/nKRaLuyIK8bwYDAY8Hg+BQIBgMEihUGBubk4+L4/DSNjtdk6cOMHLL7/MwYMH+dnPfsa1a9e4cuUK1Wq1bYeOTqfDZDIRDocJh8O43W4URSGZTFIqlSiXy1SrVRkxNhoNqtWqvFZi36iquuv66nQ6SWAQh4S4F8LofxVoNBq0Wi16vR6bzYbX68VoNKKqKktLS5TLZfn6+33/nkrjvvO0FP8X/96JnafzfhbZms0m9XqdhYUF6vU609PTeL1eDAYDqVSKjY0NKpUKdrudgYEBLl68yPnz59Hr9dTrdWw2GxaLBYPBQKVSaft6vyz0ej0Oh4OzZ88SCoXI5/PMzs6ytLREJpPZN+9SURRsNhtDQ0NcuHCB8+fP09PTg9lsxmKxcOjQIcrlstzgiURiX71fRVGw2+0cOHCAf/gP/yHhcJjl5WUmJyeB3c+leJa1Wi1Wq5XR0VFGR0cZGhpidnaWWCxGKpV6LIZdo9Fw5MgRLl68yLe//W0MBgMrKyskEgmuXr3aljWJ6NDtdtPf38/LL79Mf38/DoeDWq3G/fv3icViJBIJABmdNRoNKpUK5XKZcrlMoVCQe15RFHw+H11dXZhMJlKpFOl0WtZkxMH5qJ67TqfDbDYzNjbG+fPnGR0dZXBwkJmZGT744AOuX7/O+vo6tVptX6PIp86463Q6jEYjZrMZr9eL1WrFYrHgcDgwGo2SfWIwGHblv4Rnmc/nSSQSxONxGeK142I3m03y+TyRSITNzU2sVisGg4GtrS2ZcrFYLFSrVbq6uhgeHpbfT6VS+56//jJwuVyMj48zPDxMsVhkdXWVq1evksvl9nW9iqLg9XoZHR3l7NmzBINBzGYzOp2O7e1tQqEQR48elQfk9evXpRe/H2vUarX09vYyPj7O0NAQ5XKZjY0NFhYWqFQqv2JMdnrwXq8Xr9eL3W6nXC5Lg/A4oCgKAwMD9Pb24na72d7eplgsksvl2pJzFwed2Wymu7ubU6dOMTExgc1mo9FokEgk5N6NxWJsb29TLpdlDUB44Y1GQ6ZJhL3o6+uTz4ndbkdRFHK5HPl8fs/rFlFBIBBgdHSUgwcPEg6HcTqduFwuhoeHee+995ienmZra2vf6hVPlXEXp6Tdbsfr9XLgwAH8fr8MZW02G1qtFp1Oh9VqlTk2VVXJ5XKk02kSiQTz8/PcunWLZDLZtoutqiqlUolKpSLXZDQaqdVqMpytVCqYzWbi8TiZTAa9Xk82m2VtbU2Gc08KFEXB4/Fw5MgRuru7uXv3Lqurq0xNTe27V6zT6ejp6WFsbIyjR4/idDrRarU0m025TpErVhSFQqGAXq8nFotJA98uI68oCkajkaGhIQ4cOIDP5+PmzZssLS2xsLAg7/3DuV5VVdFoNPh8PrxeL2azmXw+T7VafSypOREN9/f3EwwGMZlMlEolUqkUm5ubLS9U74xgHA4Hvb29nDhxgsHBQfL5POl0mvn5eRYXF6Xn/nD+/eFI3mg0YrVa8Xg8jIyM4Pf70ev1GAwG0uk0pVIJeLRc+8Nr1+v1eDweuru7CYVCuN1unE4ngUCA4eFhqtUq6XSaRqNBoVDYFyfjqTLu8MBz9/v9HDp0iO9973v09fVht9ull/ZwM5B4aESurVarkc1mefPNN/n5z3/O1atX2xb27iyaCo9CpIiEEfB6vYyMjNBsNsnlcqyvr3Pjxg3pvT8p0Gg0DA0N8Vu/9VsYjUY2NjZYXV3d92KfSF+88MILXLhwgd7eXnQ6H
Y1Gg3q9TrVaxWg0YjKZ8Pv9DA0Ncfz4cSYnJ3nrrbe4dOnSr+S8WwmNRkMoFOLll1/m7NmzaLVa3n//fW7dusX6+vqvGOqdazCZTNLb1+l05PP5xxbBiedzYmKCvr4+6SAJA9vqe76T1nrw4EFOnTrFiRMn0Ov13Lx5k0uXLvHee+9RKBQ+kz0kInVh2F0uF4ODgxw9epRvfvObAKTTaTY2Ntja2pJ02b1cX3GwqKqK3+/HZrPJrIFwMOx2Oz/4wQ8olUpcuXKFycnJfXHcnjrjLi6kYKMImlmpVCIej7O2tsbW1pYsnoifDwQC+Hw+mcK5cOECqVSKZDJJOp1u6wYSr729vb3Lo7h48SLHjx/n3LlzOJ1OmU/MZDKSUfGkwGg04na7CYfD1Go1FhcXmZub21fDo9FosFqtHDt2jG984xuMjY1hMBjY3t4mk8mQz+fJ5XLYbDZsNptknRw4cIDu7m6OHTvG8ePH+eCDD7h3717L6wQajQaLxcJ3vvMdDh48iEaj4dq1a3z88
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAdsAAAHVCAYAAAC5cFFEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9Z3Rc13X3/bvT+www6B0gKgESADsJiqRYRNIqlKhmUW0pclxkK1l2IvlNsrJW/HzxWnGWk7gs27ElFzmSLFESRVJildgLCBawgShE78AAA2AGwPR5P/C5J6AsycBg4Ih+5v/FhkDcc8+555zd/ntvKRwOE0MMMcQQQwwxzB0U/9svEEMMMcQQQwx/6YgJ2xhiiCGGGGKYY8SEbQwxxBBDDDHMMWLCNoYYYoghhhjmGDFhG0MMMcQQQwxzjJiwjSGGGGKIIYY5xpwIW0mStkiS1CBJ0k1Jkv6/uRgjhhhiiCGGGO4USNHOs5UkSQk0ApuALqAGeCIcDtdFdaAYYoghhhhiuEMwF5btMuBmOBxuCYfDPuBNYNscjBNDDDHEEEMMdwRUc/DMdKBzys9dwPJP/iNJkr4KfPX//rh4Dt4jhhhiiCGGGP6ccITD4cRP+8VcCNtpIRwO/xfwXwCSJMVqRsYQQwwxxHCno/2zfjEXbuRuIHPKzxn/97/FEEMMMcQQw/+TmAthWwMUSJKUK0mSBvgysHsOxokhhhhimDYUCgUqlQqlUolSqUSSpP/tV7qjoVAoYms4A0TdjRwOhwOSJH0LOAAogVfD4fD1aI4hSRJz2a1IPpQKhYJAIEAgEJizsaaLuZ7z1EPzl9IJSpIkNBoNZrMZhULB5OQkXq8Xv9//FzPHGP40lEoliYmJlJSUYLFYcLvdDA8P09HRwcjICMFg8M/2LvI5uxP3n0KhQK/Xk5SURFZWFikpKUxOTjIwMEBDQwOjo6OEQqH/7df8wiLqqT8RvcQ0Y7aSJKFUKsXP4XCYUCgU1Y2rUCjQarUkJiaSkpICQH19PW63+8+ykZRKJXq9nvz8fEpLS4mPj8doNKJQKGhubqa6upq+vj68Xu+s5i1JklAq9Ho9Wq0WhUKBTqfDYrGIQzQxMTGnwkm+fCRJEgrFbNdZkiT0ej2lpaXcc889ZGbeimp0dHRQXV3N+fPnGRsb+7NeeArFLSdS7DL6dMhnW96X8nr5fL5ZnXG1Wk1WVhbbt29ny5YtqNVqOjo6qK+v5+jRo9TW1jIxMTGn30Wj0WCxWDCZTIRCIcbGxnC73QSDwYjnpdPpSEpKYvXq1VitVgYHB2lqaqKnpwen0xlVA0GSJHQ6HSUlJTz44IPMnz+fzMxM4uLi8Hq9DA4OUltby65du6ivr2dwcPCOVCZkKJVKkpKSWLBgATabjStXrtDS0oLP55vOn18Ih8NLPu0X/2sEqZlCoVBgNBrJzc0lJSWFkZERbt68+ZnaVCQfWxa0ycnJ3HXXXeTl5aFUKtHpdNTU1DA5ORmNqXwqZCssLS2NTZs2sXXrVgoLCzGZTGi1WsLhMG1tbbz11lvs3r2b9vb2iA6UfKmZTCbS09NJTEzEbrdjMBhQq9UYDAaSkpIwGo2cPHmS06dPMzw8jN/vn/UcFQoFZrOZrKwsMjIyMJlMKJVKQqEQQ0NDDAwM0NfXh9PpjPgikiQJi8XCmjVreP7556msrEStVjM5OUlTUxPNzc23KWyzhbyearUao9GI2WwmPj6e/Px8CgoKsNvtSJLE8PAwly9f5vDhw4yPj0dt/GhDkiRUKhVGo5Hs7GxKS0spLi4WgmJ4eJimpiauXr1Kd3c3k5OTEV/s8tqZzWZKS0spLCzEaDRit9vRarV4PB4aGxu5fPkyra2tMz5/CoWC+Ph47r77bp544gmys7Pp7e2lvb1dKJZ6vZ5wOIzP5yMQCERdSKhUKpKSkigtLcVutwNw9epVOjo6GB8fn/E+l++J9PR0HnvsMTZt2kRCQgKTk5OMjIzQ2trKa6+9RnV1dVQErkKhwGazUVVVxVe/+lVWrFghPEWyYpybm8v8+fMpLi7m7bff5p133sHlct0xAneqt0GSJNRqNXl5eTz99NPk5eXx9ttv8/rrr89aibgjhK1CoSApKYkNGzawdetWjEYjJ06coK+vj7GxMXFw4NaCBQIBgsFgRO4hhUKBxWIhIyOD0tJSkpOTiY+Px+FwUF9fPycasHyAcnJyeOKJJ3j44YdJT09Ho9EITT8UCpGenk56ejoqlSpiQSQrD6mpqSxdupS0tDRGRkZwuVwYDAZyc3NZsWIFNpsNv9/PmTNnojJnpVJJamoq9913H+vWrSMuLo5wOIzX6yUQCNDY2MilS5dwu904nc6IN7VWq2XJkiV885vfpKysjPHxcerr62lra+PKlSvU1tbidrtnfRGoVCri4uLIzc1l/fr1lJaWkpaWRlxcHCaTCYPBgMFgQKVSEQqFcLlc7Nu3jxMnTnxhha1CoSAhIYGqqirWr19PRUUFaWlpQtnz+/0EAgHGxsbo7Ozk4MGDnDx5ksbGRnw+34yFhtFoJC8vjy996Uts3ryZ7OxsdDqd2PfBYJDh4WEOHTrET3/6U27evDkjAaLRaCgvL+fRRx8lNzcXv9/PpUuXeP/99+ns7KS/vx+VSoXVasXlcjExMRFVl7IkSRgMBgoKCli+fDnp6elMTEzgcDjo6+tjYmIiovCQJEmMjIyIO9BisWCz2Vi7di1r1qxBkiRu3LjB8PBwVOaQmprKunXrWLJkCWazWezn8fFxJEnCbDZjNBpZunQpk5OTVFdXU19fH3VhO9ULFg6Ho/L8T66//Fy1Wk1ycjLZ2dnk5+dHJYx3RwhbjUbDmjVr+Id/+AesViutra0MDg4yOjoK3LpgZWGr0WgIBAJMTk4yOTk5o8MTCoXwer20t7ezb98+9Ho9CQkJrFmzhsbGRtrb2+fkolQoFCQnJ/P888/z+OOPY7fbCYfD4vBrtVrUajUTExO0t7fjcDgiEoCyJipr021tbTQ0NDA0NEQgECAxMZF58+aRm5uLXq8X7uTZCluFQkFaWhpPP/00W7duJRQK0draSn9/P3l5eSQnJ9PT08PQ0BDDw8MRX3gKhYLCwkL+6Z/+ieLiYoaGhnjvvfc4deoUTqdTuMZnq/ErlUoKCgp49tln2bhxI+np6cIzILs/AYLBIIFAAL/fT39/Pzdu3MDj8UxrDNmtLrtUDQYDcXFxbNy4EY1GI6z17u5uzp07N2utW6lUEhcXx/bt23n44YdJSkrC7/czODhIY2MjdXV1Yt6JiYmYzWbWrFmDQqGgv78fh8Mxo/G1Wi0LFizg+eefZ+PGjSQkJNy2fvKlp9frqayspKioiO7u7mkrSrIQWL16NfPnz0elUtHW1sbBgwc5fvy4OFvyWsrrHU1uhFqtJiMjg2XLllFeXk56ejqdnZ1ivEgEhqyg+nw+zp49y7lz51AoFGg0Gg4fPsyLL77I/PnzycnJmZXSOhVer1coI4FAgNraWnbu3Elra
yvz5s1j+/btVFVVYTAYsFgss1JY5D2vVqsxmUxkZmZSXFxMeno6CQkJJCUlkZiYiMPh4JVXXuHcuXPTde9+Kj5tffx+v5AtRqMRpVIZFa/mF17Y6nQ61q5dyz/+4z+SmppKc3Mzr776Kh9++KFwVfh8PpRKJTabjWXLllFcXExvby9Hjx6lvb19Rh9f1tzr6urweDwoFAq2b99OdnY2Vqs16sJWkiRMJhM7duzg8ccfJz4+nsnJSVpaWmhqasJisZCbm0tcXBw3btzgzJkzs7LMwuEwwWCQgYEBenp6RExMpVKh1WpJS0vDbDYTDAZFnGK2seHU1FS+8Y1vsHr1aoaHh/nwww+5cOECZrOZp556iqSkJDo7O7lx48as3E9paWn88z//MyUlJQwPD7N7925ee+01HA4HWq0Wq9UalRh/fHw8Tz/9NDt27CAhIUF4Hvx+P16vl9HRURwOB7W1tXR1deF0Orl+/To3btz43EMrex70ej2JiYnk5+eTn59PVlYWhYWFJCUlCZdhO
"text/plain": [
"<Figure size 576x576 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "IfZJRgj1rudZ"
},
"source": [
"### Discussion\n",
"Provide a brief analysis of your loss curves and reconstructions: \n",
"* What do you observe in the behaviour of the log-likelihood loss and the KL loss (increasing/decreasing)?\n",
"* Can you intuitively explain if this behaviour is desirable? Have you observed posterior collapse during traing (i.e. when the KL is too small during the early stages of training)? \n",
" * If yes, how did you mitigate it? How did this phenomenon reflect on your output samples?\n",
" * If no, why do you think that is?"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "vy4KKp2UJtMJ"
},
"source": [
"**YOUR ANSWER**\n",
"\n",
"During training, the log-likelihood loss decreases with increasing epochs and then stabilises, and if we increase the beta, it will converge to a higher value. This behaviour is desirable, for we can get a better network to sample outputs which can be similar to the training data.\n",
"\n",
"While KL loss first increases with more epochs and then stabilises, and if we use a higher beta, it will converge to a lower value. Because we performed gradient descent the sign of KL loss in the loss function is flipped, so an increase is desirable.\n",
"\n",
"To combat posterior collapse, which was observed during initial training, first we increased the epochs, and also decrease the dimension of z, while if it is not enough, we can get little factors.\n",
"\n",
"As for beta, the higher beta, the generated image can be more desirable, however, the T-SNE result can be quite terrible. So we need to find a balance between them.\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "JTprojS7sLP8"
},
"source": [
"---\n",
"<h2> Part 1.3 (11 points) <h2/>\n",
"\n",
"Qualitative analysis of the learned representations\n",
"\n",
"In this question you are asked to qualitatively assess the representations that your model has learned. In particular:\n",
"\n",
"a. Dimensionality Reduction of learned embeddings\n",
"\n",
"b. Interpolating in the latent space\n",
"\n",
"## Part 1.3a: T-SNE on Embeddings (7 Points)\n",
"Extract the latent representations of the test set and visualize them using [T-SNE](https://en.wikipedia.org/wiki/T-distributed_stochastic_neighbor_embedding) [(see implementation)](https://scikit-learn.org/stable/modules/generated/sklearn.manifold.TSNE.html). \n",
"\n",
"We've provided a function to visualize a subset of the data, but you are encouraged to also produce a matplotlib plot (please use different colours for each digit class)."
]
},
{
"cell_type": "code",
"metadata": {
"id": "Xl4xZOg7s0ke"
},
"source": [
"# *CODE FOR PART 1.3a IN THIS CELL\n",
"\n",
"from sklearn.manifold import TSNE\n",
"import numpy as np\n",
"\n",
"z_batches = []\n",
"label_batches = []\n",
"for i, (data, label) in enumerate(loader_test):\n",
" data = data.to(device)\n",
" mu, logvar = model.encode(data)\n",
" z = model.reparametrize(mu, logvar).detach().cpu().numpy()\n",
" z_batches.append(z)\n",
" label_batches.append(label.detach().cpu().numpy())\n",
"\n",
"zs = []\n",
"labels = []\n",
"for z, label in zip(z_batches, label_batches):\n",
" for i in z:\n",
" zs.append(i)\n",
" for l in label:\n",
" labels.append(l)\n",
"\n",
"z_embedded = TSNE(n_components=2).fit_transform(np.array(zs))"
],
"execution_count": 26,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "-M_EI2ZnnXHZ",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 442
},
"outputId": "5350416e-de11-4a8d-de84-34c7626ba11e"
},
"source": [
"# Interactive Visualization - Code Provided\n",
"test_dataloader = DataLoader(test_dat, 10000, shuffle=False)\n",
"\"\"\" Inputs to the function are\n",
" z_embedded - X, Y positions for every point in test_dataloader\n",
" test_dataloader - dataloader with batchsize set to 10000\n",
" num_points - number of points plotted (will slow down with >1k)\n",
"\"\"\"\n",
"plot_tsne(z_embedded, test_dataloader, num_points=1000, darkmode=False)\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"alt.HConcatChart(...)"
],
"text/html": [
"\n",
"<div id=\"altair-viz-430422da9a754312a71b5fdebf5806fc\"></div>\n",
"<script type=\"text/javascript\">\n",
" (function(spec, embedOpt){\n",
" let outputDiv = document.currentScript.previousElementSibling;\n",
" if (outputDiv.id !== \"altair-viz-430422da9a754312a71b5fdebf5806fc\") {\n",
" outputDiv = document.getElementById(\"altair-viz-430422da9a754312a71b5fdebf5806fc\");\n",
" }\n",
" const paths = {\n",
" \"vega\": \"https://cdn.jsdelivr.net/npm//vega@5?noext\",\n",
" \"vega-lib\": \"https://cdn.jsdelivr.net/npm//vega-lib?noext\",\n",
" \"vega-lite\": \"https://cdn.jsdelivr.net/npm//vega-lite@4.8.1?noext\",\n",
" \"vega-embed\": \"https://cdn.jsdelivr.net/npm//vega-embed@6?noext\",\n",
" };\n",
"\n",
" function loadScript(lib) {\n",
" return new Promise(function(resolve, reject) {\n",
" var s = document.createElement('script');\n",
" s.src = paths[lib];\n",
" s.async = true;\n",
" s.onload = () => resolve(paths[lib]);\n",
" s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
" document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
" });\n",
" }\n",
"\n",
" function showError(err) {\n",
" outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
" throw err;\n",
" }\n",
"\n",
" function displayChart(vegaEmbed) {\n",
" vegaEmbed(outputDiv, spec, embedOpt)\n",
" .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
" }\n",
"\n",
" if(typeof define === \"function\" && define.amd) {\n",
" requirejs.config({paths});\n",
" require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
" } else if (typeof vegaEmbed === \"function\") {\n",
" displayChart(vegaEmbed);\n",
" } else {\n",
" loadScript(\"vega\")\n",
" .then(() => loadScript(\"vega-lite\"))\n",
" .then(() => loadScript(\"vega-embed\"))\n",
" .catch(showError)\n",
" .then(() => displayChart(vegaEmbed));\n",
" }\n",
" })({\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}, \"hconcat\": [{\"mark\": \"circle\", \"encoding\": {\"color\": {\"type\": \"nominal\", \"condition\": {\"value\": \"lightgray\", \"selection\": \"selector001\"}, \"field\": \"label\"}, \"size\": {\"value\": 100}, \"tooltip\": {\"type\": \"nominal\", \"field\": \"label\"}, \"x\": {\"type\": \"nominal\", \"axis\": null, \"field\": \"x\"}, \"y\": {\"type\": \"nominal\", \"axis\": null, \"field\": \"y\"}}, \"height\": 400, \"selection\": {\"selector001\": {\"type\": \"single\", \"on\": \"mouseover\", \"clear\": \"false\", \"nearest\": true, \"init\": {\"x\": -2.4267890453338623, \"y\": 31.662296295166016}}}, \"width\": 400}, {\"mark\": {\"type\": \"rect\", \"stroke\": \"black\", \"strokeWidth\": 0}, \"encoding\": {\"color\": {\"type\": \"quantitative\", \"field\": \"image\", \"legend\": null, \"scale\": {\"scheme\": {\"name\": \"lightgreyteal\", \"extent\": [1, 0]}}, \"sort\": \"descending\"}, \"x\": {\"type\": \"ordinal\", \"axis\": null, \"field\": \"column\"}, \"y\": {\"type\": \"ordinal\", \"axis\": null, \"field\": \"row\"}}, \"height\": 400, \"transform\": [{\"filter\": {\"selection\": \"selector001\"}}, {\"window\": [{\"op\": \"count\", \"as\": \"index\"}]}, {\"flatten\": [\"image\"]}, {\"window\": [{\"op\": \"count\", \"as\": \"row\"}], \"groupby\": [\"index\"]}, {\"flatten\": [\"image\"]}, {\"window\": [{\"op\": \"count\", \"as\": \"column\"}], \"groupby\": [\"index\", \"row\"]}], \"width\": 400}], \"data\": {\"name\": \"data-c6616d77d2a525dc6ef1944f4d854107\"}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v4.8.1.json\", \"datasets\": {\"data-c6616d77d2a525dc6ef1944f4d854107\": [{\"x\": -2.4267890453338623, \"y\": 31.662296295166016, \"label\": 8, \"image\": [[[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.250980406999588], [0.501960813999176], [0.7490196228027344], [1.0], [1.0], [1.0], [1.0], [1.0], [0.250980406999588], [0.0], [0.501960813999176], [0.250980406999588], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.250980406999588], [1.0], [1.0], [1.0], [1.0], [1.0], [1.0], [1.0], [1.0], [1.0], [0.7490196228027344], [1.0], [1.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.7490196228027344], [1.0], [1.0], [1.0], [1.0], [0.7490196228027344], [1.0], [1.0], [1.0], [1.0], [1.0], [1.0], [1.0], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], 
[0.0], [0.0], [0.0], [0.0], [0.501960813999176], [1.0], [1.0], [0.7490196228027344], [0.0], [0.0], [0.0], [0.0], [1.0], [1.0], [1.0], [1.0], [1.0], [0.7490196228027344], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.501960813999176], [1.0], [1.0], [0.501960813999176], [0.0], [0.0], [0.0], [0.0], [1.0], [1.0], [1.0], [1.0], [1.0], [0.250980406999588], [0.0], [0.0], [0.0], [0.0], [0.0]], [[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.501960813999176
"</script>"
]
},
"metadata": {
"tags": []
},
"execution_count": 12
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "xvQvtlDzIB3M",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 606
},
"outputId": "39afce13-8a16-45df-ab7a-ee08b5a0de78"
},
"source": [
"# Custom Visualizations\n",
"import seaborn as sns\n",
"import pandas as pd\n",
"\n",
"data_dict = {'x': z_embedded[:, 0], 'y': z_embedded[:, 1], 'labels': labels}\n",
"df = pd.DataFrame(data_dict)\n",
"\n",
"plt.figure(figsize = (10,10))\n",
"sns.scatterplot(x='x', y='y', data=df, hue='labels', legend='full', palette='Paired')\n",
"plt.show()\n",
"\n",
"zero_x=np.array(df.x[df[\"labels\"]==0])\n",
"zero_y=np.array(df.y[df[\"labels\"]==0])\n",
"one_x=np.array(df.x[df[\"labels\"]==1])\n",
"one_y=np.array(df.y[df[\"labels\"]==1])\n",
"two_x=np.array(df.x[df[\"labels\"]==2])\n",
"two_y=np.array(df.y[df[\"labels\"]==2])\n",
"three_x=np.array(df.x[df[\"labels\"]==3])\n",
"three_y=np.array(df.y[df[\"labels\"]==3])\n",
"four_x=np.array(df.x[df[\"labels\"]==4])\n",
"four_y=np.array(df.y[df[\"labels\"]==4])\n",
"five_x=np.array(df.x[df[\"labels\"]==5])\n",
"five_y=np.array(df.y[df[\"labels\"]==5])\n",
"six_x=np.array(df.x[df[\"labels\"]==6])\n",
"six_y=np.array(df.y[df[\"labels\"]==6])\n",
"siven_x=np.array(df.x[df[\"labels\"]==7])\n",
"siven_y=np.array(df.y[df[\"labels\"]==7])\n",
"eight_x=np.array(df.x[df[\"labels\"]==8])\n",
"eight_y=np.array(df.y[df[\"labels\"]==8])\n",
"night_x=np.array(df.x[df[\"labels\"]==9])\n",
"night_y=np.array(df.y[df[\"labels\"]==9])\n",
"\n",
"data=[zero_x,zero_y,one_x,one_y,two_x,two_y,three_x,three_y,four_x,four_y,five_x,five_y,six_x,six_y,siven_x,siven_y,eight_x,eight_y,night_x,night_y]\n",
"plt.figure(figsize = (10,10))\n",
"plt.boxplot(data)\n",
"plt.show()"
],
"execution_count": 46,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/plain": "<Figure size 720x720 with 1 Axes>",
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\r\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\r\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n<!-- Created with matplotlib (https://matplotlib.org/) -->\r\n<svg height=\"588.35625pt\" version=\"1.1\" viewBox=\"0 0 614.182812 588.35625\" width=\"614.182812pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\r\n <metadata>\r\n <rdf:RDF xmlns:cc=\"http://creativecommons.org/ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\r\n <cc:Work>\r\n <dc:type rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\"/>\r\n <dc:date>2021-02-23T11:48:22.383275</dc:date>\r\n <dc:format>image/svg+xml</dc:format>\r\n <dc:creator>\r\n <cc:Agent>\r\n <dc:title>Matplotlib v3.3.4, https://matplotlib.org/</dc:title>\r\n </cc:Agent>\r\n </dc:creator>\r\n </cc:Work>\r\n </rdf:RDF>\r\n </metadata>\r\n <defs>\r\n <style type=\"text/css\">*{stroke-linecap:butt;stroke-linejoin:round;}</style>\r\n </defs>\r\n <g id=\"figure_1\">\r\n <g id=\"patch_1\">\r\n <path d=\"M 0 588.35625 \r\nL 614.182812 588.35625 \r\nL 614.182812 0 \r\nL 0 0 \r\nz\r\n\" style=\"fill:none;\"/>\r\n </g>\r\n <g id=\"axes_1\">\r\n <g id=\"patch_2\">\r\n <path d=\"M 48.982812 550.8 \r\nL 606.982813 550.8 \r\nL 606.982813 7.2 \r\nL 48.982812 7.2 \r\nz\r\n\" style=\"fill:#ffffff;\"/>\r\n </g>\r\n <g id=\"PathCollection_1\">\r\n <defs>\r\n <path d=\"M 0 3 \r\nC 0.795609 3 1.55874 2.683901 2.12132 2.12132 \r\nC 2.683901 1.55874 3 0.795609 3 -0 \r\nC 3 -0.795609 2.683901 -1.55874 2.12132 -2.12132 \r\nC 1.55874 -2.683901 0.795609 -3 0 -3 \r\nC -0.795609 -3 -1.55874 -2.683901 -2.12132 -2.12132 \r\nC -2.683901 -1.55874 -3 -0.795609 -3 0 \r\nC -3 0.795609 -2.683901 1.55874 -2.12132 2.12132 \r\nC -1.55874 2.683901 -0.795609 3 0 3 \r\nz\r\n\" id=\"C0_0_c711b00b69\"/>\r\n </defs>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#ff7f00;stroke:#ffffff;stroke-width:0.48;\" x=\"546.73598\" xlink:href=\"#C0_0_c711b00b69\" y=\"232.304625\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#b2df8a;stroke:#ffffff;stroke-width:0.48;\" x=\"321.344729\" xlink:href=\"#C0_0_c711b00b69\" y=\"501.473774\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#1f78b4;stroke:#ffffff;stroke-width:0.48;\" x=\"483.016033\" xlink:href=\"#C0_0_c711b00b69\" y=\"372.900631\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#a6cee3;stroke:#ffffff;stroke-width:0.48;\" x=\"211.187776\" xlink:href=\"#C0_0_c711b00b69\" y=\"257.519053\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#fb9a99;stroke:#ffffff;stroke-width:0.48;\" x=\"411.226213\" xlink:href=\"#C0_0_c711b00b69\" y=\"72.220097\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#1f78b4;stroke:#ffffff;stroke-width:0.48;\" x=\"502.814711\" xlink:href=\"#C0_0_c711b00b69\" y=\"320.067608\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#fb9a99;stroke:#ffffff;stroke-width:0.48;\" x=\"450.776297\" xlink:href=\"#C0_0_c711b00b69\" y=\"81.216729\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#6a3d9a;stroke:#ffffff;stroke-width:0.48;\" x=\"440.213454\" xlink:href=\"#C0_0_c711b00b69\" y=\"197.577859\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#e31a1c;stroke:#ffffff;stroke-width:0.48;\" x=\"331.805821\" xlink:href=\"#C0_0_c711b00b69\" 
y=\"453.669598\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#6a3d9a;stroke:#ffffff;stroke-width:0.48;\" x=\"423.362679\" xlink:href=\"#C0_0_c711b00b69\" y=\"233.709786\"/>\r\n </g>\r\n <g clip-path=\"url(#p8762374da0)\">\r\n <use style=\"fill:#a6cee3;stroke:#ffffff;stroke-width:0.48;\" x=\"219.349933\" xlink:href=\"#C0_0_c711b00b69\" y=\"
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAmYAAAJNCAYAAACbR7l/AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9d5Rcd3qeiz6/XTl0V3XOGamRcyAYwMxhHE5QmiBpxpLvaCRLlqUlH8k+x7pn2dKxJdmSJftaOrLSjEaBk4dhmMkZBhA55+5G5xyru/L+7h9fdajuBgkQIIHh7GctLhK7q3bt2gWiXnzhfY2I4ODg4ODg4ODgcPOxbvYFODg4ODg4ODg4KI4wc3BwcHBwcHC4RXCEmYODg4ODg4PDLYIjzBwcHBwcHBwcbhEcYebg4ODg4ODgcIvgCDMHBwcHBwcHh1sE982+gBtBaWmpNDY23uzLcHBwcHBwcHB4Tw4dOjQsImXL/ewjIcwaGxs5ePDgzb4MBwcHBwcHB4f3xBhz+Uo/c1qZDg4ODg4ODg63CDdVmBlj/rUx5pQx5qQx5mvGGL8xpskYs98Yc9EY84/GGO/NvEYHBwcHBwcHhw+LmybMjDE1wL8CtovIesAF/BTw/wD/VURWAGPAF2/WNTo4ODg4ODg4fJjc7BkzNxAwxqSBINAH3AP8TO7nfwP8B+B/3pSrc3BwcHBwcLglSafTdHd3k0gkbvalXBG/309tbS0ej+eqn3PThJmI9Bhj/gDoBOLA88AhYFxEMrmHdQM1N+kSHRwcHBwcHG5Ruru7KSgooLGxEWPMzb6cJYgIIyMjdHd309TUdNXPu5mtzCLgCaAJqAZCwEPX8PxfNMYcNMYcHBoa+oCu0sHBwcHBweFWJJFIUFJSckuKMgBjDCUlJddc0buZw//3Ae0iMiQiaeAbwF4gaoyZreTVAj3LPVlE/lxEtovI9rKyZa1AHBwcHBwcHD7C3KqibJb3c303U5h1AruNMUGjV34vcBp4BfhU7jE/C3z7Jl2fg4ODg4ODw48g4XD4XX/e0dHB+vXrr+mcP/dzP8dTTz11PZd1Vdw0YSYi+4GngMPAidy1/DnwW8CvG2MuAiXAX96sa3RwcHBwcHBw+DC5qT5mIvJ/icgaEVkvIp8TkaSItInIThFZISKfFpHkzbxGBwcHBwcHhx9NYrEY9957L1u3bmXDhg18+9vzTbhMJsNnPvMZWltb+dSnPsXMzAwAhw4d4q677mLbtm08+OCD9PX1LTnvv/23/5a1a9eyceNGfuM3fuOGXrPj/O/g4ODg4ODwkcTv9/PNb36Tw4cP88orr/Bv/s2/QUQAOHfuHL/0S7/EmTNnKCws5H/8j/9BOp3mV37lV3jqqac4dOgQX/jCF/id3/mdvHOOjIzwzW9+k1OnTnH8+HH+3b/7dzf0mm+2j5mDg4ODg4ODwweCiPDbv/3bvP7661iWRU9PDwMDAwDU1dWxd+9eAD772c/yJ3/yJzz00EOcPHmS+++/H4BsNktVVVXeOSORCH6/ny9+8Ys8+uijPProozf0mh1h5uDg4ODg4PCR5Ktf/SpDQ0McOnQIj8dDY2PjnH3F4o1JYwwiwrp163jrrbeueE63280777zDSy+9xFNPPcWf/umf8vLLL9+wa3ZamQ4ODg4ODg4fSSYmJigvL8fj8fDKK69w+fLluZ91dnbOCbC///u/5/bbb2f16tUMDQ3NHU+n05w6dSrvnLFYjImJCR5++GH+63/9rxw7duyGXrNTMXNwcHBwcHD4SPKZz3yGxx57jA0bNrB9+3bWrFkz97PVq1fzZ3/2Z3zhC19g7dq1fOlLX8Lr9fLUU0/xr/7Vv2JiYoJMJsOv/dqvsW7durnnTU1N8cQTT5BIJBAR/uiP/uiGXrOZHYL7UWb79u1y8ODBm30ZDg4ODg4ODh8SZ86cobW19WZfxnuy3HUaYw6JyPblHu+0Mh0cHBwcHBwcbhEcYebg4ODg4ODgcIvgCDMHBwcHBwcHh1sER5g5ODhcFWJnkYwTxOHg4ODwQeJsZTo4OLwn9uRlGDsPsV6ksBGKVmGFq97zeQ4ODg4O14YjzBwcHN4Ve3oQLn4DJnP+P0NHoHwr9opPYnlDN/fiHBwcHD5iOK1MBweHd8XM9M+LslkGD8NM/825IAcHB4dbhOeee47Vq1ezYsUKfv/3f/+GnNMRZg4ODu/KFb0ObyEPRNuZfXNwcPiQyWazfPnLX+bZZ5/l9OnTfO1rX+P06dPXfV6nleng8GOM2FmYaEMmL4MBU9AAhY0Yl2f+QeEqCJRCfHj+WLQFQjd/xsye7ISBQzDZhh1dCeVbsQpqb/ZlfSTJpDMMdYySTWcoqY0SKAze7EtycLipvPPOO6xYsYLm5mYAfuqnfopvf/vbrF279rrO6wgzB4cfZ8YvIif/EuwUAOLyYdZ9AYpXzz3ECpYjrZ9H+vfDZAcUt6oAusnzZfbMEJz7Gkz36YGpLhi/hL3uC1j+yE29tg8LSY4DBuP7YN/v1PAkr/7zKZ7/p1PYWaF1ezWf/uWdVDSXfaCv6+BwI+kYneZY7yQz6SxBj4tN1YU0Fr//P8d6enqoq6ub+3VtbS379++/7ut0hJmDw48xMnBgTpQBkE0iQ8egaBXGmLnDprAeCurATmNc3ht+HVk7QzI7jct48LmvshIz3T8vymaZugwzffARF2aSmkL6D0DnC2AspOEhTMU2jOeDqWJ1nBzkua+dnPv1mYO9vPG9czzxS8W43K4P5DUdHG4kHaPTvNM5TjY3gjGTzvJO5zjAdYmzDwJnxszB4ccUEYHkxNIfpCaXfbwx5gMRZbHUGMdGvs/LPX/BD/r/joGZS9hiv+fzjLX8H1/GfPT/WJPRs9D2HcjEIT0NF78O4xc+sNdrPz245NjRN7qJj898YK/p4HAjOdY7OSfKZsmKcKx3+T/vroaamhq6urrmft3d3U1NTc37Pt8sH/0/wRwcHJbFGIOpWJqha8q35FXLZpH0NDLRhoxdQBYJutTJY0z9//4HE3/0ByTfeQvJZK7qGrJ2hnMTb9AzfRpBiGcmeWfwm0ymBt7zuRKsgkhL/sHitcgtMPv2rqTi2P1nsSf63vuxyyBiQ9/SdokMHr7eK7silY3RJccaVxfjC/s+sNe0hwbJnjuHffEi9tjYB/Y6Dj8ezKSz13T8atixYwcXLlygvb2dVCrFP/zDP/D444+/7/PN4rQyHRx+jJGStbDiSeh6GbCg/l6kaDWLZZkkxpDz/wijZ/VAoBzWfwETqiR1/DBDn/4MEosBMPVf/zulX/0b/Hfue8/XT2Rj9E6fXfxqxNJjRH3vLrCsQDH2iifV+HbyMkSa1fjWW3BV7/1mYHcewbz277HankEijdj3/RHW2sfhCtW/5TDGQkIVMHFxwUELohuxhwbB7cYURjCuG9dibNlQycpNFVw4poI5HPVz/09uwOO/8RVUALu3B/v55yGdRgCKimDf3VilpR/I6zl89Al6XMuKsKDn/f9/4na7+dM//VMefPBBstksX/jCF
1i3bt31XKae97rP4ODg8COL5S2A2ruQ8q2AwXjDyz5Oxi/MizKA+CDS+yas+DiJV16dE2UA2DZT//N/4d21G8vnf9fXdxkPfleYRHYq77jburpKjFVQCz8iW5iSmMS8/JuYjpcAMOPtmG98Gtv3KtbKvdd0LlO1Gxk4CNkkGBc0/yL2/pMwcgiMwWzZgrVuPcb/7vf/aimpK+YL/34f/e2jpJIZKhuKKG0ouerny8wMEpsCjxcTiVyxDQ1gZzLI8eOQTs8fHBtD+vvAEWYO75NN1YV5M2YALmPYVF14Xed9+OGHefjhh6/38vJwhJmDw48Bkk1BfAiyKfCXYnz5VSXzXlWmyc6lx8bOQzaFPT6+5Ef22DiSTsF7CDO/O8SG4ns5MPStuWPFvjqi3vJ3v54fQWTgIlZOlM1hZ5ChM3CtwqygDrb+a4j1YJtKGJ7AamhEGhpAQE6cQCoqMbU3TrQWlhdSWH7tX2IyPEz2xRdgagosC7NzJ9aaVozHs/wTkklkudbl1NTSYw4OV
},
"metadata": {
"needs_background": "light"
}
},
{
"output_type": "display_data",
"data": {
"text/plain": "<Figure size 720x720 with 1 Axes>",
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\r\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\r\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n<!-- Created with matplotlib (https://matplotlib.org/) -->\r\n<svg height=\"574.678125pt\" version=\"1.1\" viewBox=\"0 0 600.504688 574.678125\" width=\"600.504688pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\r\n <metadata>\r\n <rdf:RDF xmlns:cc=\"http://creativecommons.org/ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\r\n <cc:Work>\r\n <dc:type rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\"/>\r\n <dc:date>2021-02-23T11:48:23.873292</dc:date>\r\n <dc:format>image/svg+xml</dc:format>\r\n <dc:creator>\r\n <cc:Agent>\r\n <dc:title>Matplotlib v3.3.4, https://matplotlib.org/</dc:title>\r\n </cc:Agent>\r\n </dc:creator>\r\n </cc:Work>\r\n </rdf:RDF>\r\n </metadata>\r\n <defs>\r\n <style type=\"text/css\">*{stroke-linecap:butt;stroke-linejoin:round;}</style>\r\n </defs>\r\n <g id=\"figure_1\">\r\n <g id=\"patch_1\">\r\n <path d=\"M 0 574.678125 \r\nL 600.504688 574.678125 \r\nL 600.504688 0 \r\nL 0 0 \r\nz\r\n\" style=\"fill:none;\"/>\r\n </g>\r\n <g id=\"axes_1\">\r\n <g id=\"patch_2\">\r\n <path d=\"M 35.304688 550.8 \r\nL 593.304688 550.8 \r\nL 593.304688 7.2 \r\nL 35.304688 7.2 \r\nz\r\n\" style=\"fill:#ffffff;\"/>\r\n </g>\r\n <g id=\"matplotlib.axis_1\">\r\n <g id=\"xtick_1\">\r\n <g id=\"line2d_1\">\r\n <defs>\r\n <path d=\"M 0 0 \r\nL 0 3.5 \r\n\" id=\"m424cefe482\" style=\"stroke:#000000;stroke-width:0.8;\"/>\r\n </defs>\r\n <g>\r\n <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"49.254688\" xlink:href=\"#m424cefe482\" y=\"550.8\"/>\r\n </g>\r\n </g>\r\n <g id=\"text_1\">\r\n <!-- 1 -->\r\n <g transform=\"translate(46.073438 565.398438)scale(0.1 -0.1)\">\r\n <defs>\r\n <path d=\"M 12.40625 8.296875 \r\nL 28.515625 8.296875 \r\nL 28.515625 63.921875 \r\nL 10.984375 60.40625 \r\nL 10.984375 69.390625 \r\nL 28.421875 72.90625 \r\nL 38.28125 72.90625 \r\nL 38.28125 8.296875 \r\nL 54.390625 8.296875 \r\nL 54.390625 0 \r\nL 12.40625 0 \r\nz\r\n\" id=\"DejaVuSans-49\"/>\r\n </defs>\r\n <use xlink:href=\"#DejaVuSans-49\"/>\r\n </g>\r\n </g>\r\n </g>\r\n <g id=\"xtick_2\">\r\n <g id=\"line2d_2\">\r\n <g>\r\n <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"77.154687\" xlink:href=\"#m424cefe482\" y=\"550.8\"/>\r\n </g>\r\n </g>\r\n <g id=\"text_2\">\r\n <!-- 2 -->\r\n <g transform=\"translate(73.973437 565.398438)scale(0.1 -0.1)\">\r\n <defs>\r\n <path d=\"M 19.1875 8.296875 \r\nL 53.609375 8.296875 \r\nL 53.609375 0 \r\nL 7.328125 0 \r\nL 7.328125 8.296875 \r\nQ 12.9375 14.109375 22.625 23.890625 \r\nQ 32.328125 33.6875 34.8125 36.53125 \r\nQ 39.546875 41.84375 41.421875 45.53125 \r\nQ 43.3125 49.21875 43.3125 52.78125 \r\nQ 43.3125 58.59375 39.234375 62.25 \r\nQ 35.15625 65.921875 28.609375 65.921875 \r\nQ 23.96875 65.921875 18.8125 64.3125 \r\nQ 13.671875 62.703125 7.8125 59.421875 \r\nL 7.8125 69.390625 \r\nQ 13.765625 71.78125 18.9375 73 \r\nQ 24.125 74.21875 28.421875 74.21875 \r\nQ 39.75 74.21875 46.484375 68.546875 \r\nQ 53.21875 62.890625 53.21875 53.421875 \r\nQ 53.21875 48.921875 51.53125 44.890625 \r\nQ 49.859375 40.875 45.40625 35.40625 \r\nQ 44.1875 33.984375 37.640625 27.21875 \r\nQ 31.109375 20.453125 19.1875 8.296875 \r\nz\r\n\" id=\"DejaVuSans-50\"/>\r\n </defs>\r\n <use xlink:href=\"#DejaVuSans-50\"/>\r\n </g>\r\n </g>\r\n </g>\r\n <g id=\"xtick_3\">\r\n <g 
id=\"line2d_3\">\r\n <g>\r\n <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"105.054688\" xlink:href=\"#m424cefe482\" y=\"550.8\"/>\r\n </g>\r\n </g>\r\n <g id=\"text_3\">\r\n <!-- 3 -->\r\n <g transform=\"translate(101.873437 565.398438)scale(0.1 -0.1)\">\r\n <defs>\
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAlgAAAI/CAYAAACrl6c+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAACdc0lEQVR4nO39fVhc130vfH/Xnhn2oEESMwYD1shCx7FSBHGjWs9J43DSjBxJVZw6nKY5zki9YheMAr5nqlO7BUvT69g65xo5UKPzqKPa2A6cWK1nbrVNqzhxdFuuIe6N3Z4+cpumWNRxTmxZSLYsW2CjEcPAsJ4/YMZsDAiGvdZ+4fe5rrkQG83+rdmvv1l7vTDOOQghhBBCiH4UowtACCGEEGI3lGARQgghhOiMEixCCCGEEJ1RgkUIIYQQojNKsAghhBBCdEYJFiGEEEKIzpxGF2CmkpISXllZaXQxCCGEEEKu6dVXX32fc146199MlWBVVlbi9OnTRheDEEIIIeSaGGNn5/sbPSIkhBBCCNEZJViEEEIIITqjBIsQQgghRGeUYBFCCCGE6IwSLEIIIYQQnVGCRQghhBCiM0qwCCGEEEJ0RgkWIYQQQojOKMEihBBCCNEZJViEEEIIITqjBIsQQgghRGeUYBFCCCGE6IwSLEIIIYQQnVGCRQghhBCiM0qwCCGEEEJ0RgkWIYQQQojOKMEihBBCCNEZJViEEEIIITqjBIsQQgghRGeUYBFCCCGE6IwSLEIIIYQQnVGCRQghhBCiM0qwCCGEEEJ05jS6AIQQQogeGGML/p1zLqkkhFCCRQghxCZmJ1CMMUqqiGHoESEhhBBCiM4owSKEEEII0RklWIQQQgghOqMEixBCCCFEZ5RgEUIIIYTojBIsQgghhBCdUYJFCCGEEKIzSrAIESSRSKCmpgYOhwM1NTVIJBJGF4kQQogkNNAoIQIkEglEIhF0dXWhtrYWfX19aGhoAAAEg0GDS0cIIUQ0qsEihrFzDU80GkVXVxcCgQBcLhcCgQC6uroQjUaNLhohhBAJKMEihkgkEti3bx+SySQAIJlMYt++fbZJsgYGBlBbW6tZVltbi4GBAYNKRAghRCZKsIghWlpa4HQ60d3djVQqhe7ubjidTrS0tBhdNF1UVVWhr69Ps6yvrw9VVVUGlYgQQohMlGARQwwODuLpp5/WPEJ7+umnMTg4aHTRdBGJRNDQ0IDe3l6Mj4+jt7cXDQ0NiEQiRheNEEKIBJRgESJAMBhENBpFOByG2+1GOBxGNBqlBu6EELIAO7XNpV6ExBB+vx/f+ta3EI/Hc73svvWtb8Hv9xtdNN0Eg0FKqAghZJHs1vuaarCIIdrb25HJZFBfXw9VVVFfX49MJoP29naji0YIIcQAdut9TQkWMUQwGMSRI0fg8XjAGIPH48GRI0cs+S2FEELI8tmt9/WyHxEyxj4N4PiMRf8BwH8DUAygEcCl6eUHOOc/Xm48Yh/0CI0QQkhWtvd1IBDILbNy7+tl12Bxzl/nnH+Wc/5ZALcCuArgb6f//D+zf6PkihBCCCHzsVvva70bud8O4P9wzs8yxnReNSGEEELsKvtEIxwOY2BgAFVVVZbufa13gvVNADP7VIYYY98CcBrAA5zzIZ3jEUIIIcQm7NR0RLdG7oyxAgB3Avir6UWPA7gJwGcBvAOgY5737WWMnWaMnb506dJc/4UQQgghxFL07EW4C8A/c84vAgDn/CLnPMM5nwTwFID/ONebOOdPcs63cs63lpaW6lgcQgghRmKMLfgixM70TLCCmPF4kDFWMeNv/xlAv46xCCGEmBznXPOavYyQ2WSP5C4yni4JFmPMA2A7gL+ZsbidMfZvjLGfAQgA+AM9YhH7sNOUCIQQQpYnkUhg3759SCaT4JwjmUxi3759wu4N2ZHjY7EYUqkUYrEYIpGIfvFmf8Mw8nXrrbdysjLE43G+ceNG3tPTw9PpNO/p6eEbN27k8Xjc6KIRQgSZuuXYNx5ZHr/fz8vLyzX3hfLycu73+4XEq66u5j09PZplPT09vLq6etHrAHCaz5PTMG6iatqtW7fy06dPG10MIkFNTQ1isZhmQLne3l6Ew2H099PTZPKxa7XVMdM1jCyMMSZ1f8mOR5aHMYZTp05h+/btuWUvvPACduzYIWQ/OhwOpFIpuFyu3LLx8XG43W5kMpnFlvlVzvnWuf5GU+UQQ9htSgQizsxvhLN/t8vNMxwOw+12gzEGt9uNcDhsdJHIElBzB2vKjhw/k54jx1OCRQwh+sCeiXoyETMLh8Po7OzEoUOHkEwmcejQIXR2dlKSZRHC2/GsIH6/H3fffbdmJPe7774bfr9fSDzhI8fP9+zQiBe1wVo5jGyDBWqXYVl23HeqqvKOjg7Nso6ODq6qqkElEkf2/pMRT492PGRKPB7npaWlvLKykiuKwisrK3lpaanQ+0I8HufV1dVcURReXV295FhYoA2W4UnVzBclWCvLcg/sfNnxJr1S2HHfAeDJZFKzLJlM2vaz2i2eoij82LFjmmvZsWPHuKIowmPbkVH3hXwtlGBRI3ey4lDDV+uy475zu904dOgQ7r///tyyw4cP48CBA0ilUgaWTH92bOS+fv16jIyMwOv14u2338aNN96IoaEhrF69GufOnRMamxiPGrkTQohJNTY2orW1FYcPH8bVq1dx+PBhtLa2orGx0eiikUW4evUqrly5gnA4jJGREYTDYVy5cgVXr141umjEYJRgEcNQzxtCgFgshqamJhw4cAAejwcHDhxAU1MTYrGY0UUji3D58mXccccdmv13xx134PLly0YXjRiMEixiCOp5Q8jHsucB5zx3PhDr+Kd/+iecPHkS6XQaJ0+exD/90z8JjWen6WSMjCXcfI2zjHiZvZG71RrfmZmRPW9gw8bDKwXtO2uTvf9kxHM6ndzr9Wp6RHu9Xu50OoXEk90DW2Y8K87wAepFuHxGdB+1M0VReDqd1ixLp9NSet7QTdq6aN9Zmx0TLMZY7t7AGMvdGxhjQuLJ/nJaXV3NI5GIpnIh+7uIWFYb8mKhBIseES5SS0sLnE4nuru7kUql0N3dDafTiZaWFiHx7D44psyBRgkhRJTNmzdj79698Hg8YIzB4/Fg79692Lx5s5B4smfBOHPmDOLxuKY5Rzwex5kzZ3SPZbcZPijBWqTBwUE8/fTTCAQCcLlcCAQCePrppzE4OCgk3swsePbv2WVWJnwEXUIIkSASieDJJ59EMpkE5xzJZBJPPvmksGtZVVUVDh48qGmndPDgQWFfTgsKChAKhTT3vlAohIKCAt1j2e2LNyVYS3D06FHNfGFHjx41ukiWFQwGEY1Gc3OwhcNhRKNRBINBo4tGCCF5kfF0IRAIoK2tDfX19RgZGUF9fT3a2toQCASExEun04jFYpovw7FYDOl0WvdYtvviPd+zQyNeZm6D5fF4OADe3NzMh4eHeXNzMwfAPR6P8Nigdie6ou1pXbTvrE32/pMRz85tooyIFwqFuKqqHABXVZWHQiEhcfQCauS+fE6nk
3s8Hk1DRo/HI6ynyEyyLkorpZck3aSti/adtdkxwZI9VY7sDkLUi3BhlGDpAADv7u7WnETd3d1STmAZMax4YOeLbtLWRfvO2uyYYPn9fl5RUaG5dlZUVHC/3y8knhE97WR9+aZehCuUqqoYGhpCf38/MpkM+vv7MTQ0BFVVjS6aLqLRKLq6ujQNGbu6uhCNRo0uGiGEzMvn8y3Y23q+v/l8Pt3KMHWfnf93PRnRTikYDGrufaLaytqtF6HhtVYzX2auwQqFQtzpdPKOjg6eTCZ5R0cHdzqdUp4PQ1I1t1HjUskmY3sSMWjfWZuI/ZfvOvUqi+xHhJzbtzmH3WqwDE+qZr7MnGBxblzjOxk3FSse2Pmim7R10b6zNjsmWCvp2imaFZuqUIJlcTJuKlY8sPNFN2nron1nbXZMsFbStVMGO/UidEp8GklMLPtMPRwOY2BgAFVVVTQuFSGEXANdO/WTSCRw/PhxVFRU4OzZs6ioqMDx48dx2223WXJ7sqkEzBy2bt3KT58+bXQxT
},
"metadata": {
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "6HiAHb0ztTW8"
},
"source": [
"### Discussion\n",
"What do you observe? Discuss the structure of the visualized representations. \n",
"* What do you observe? What role do the KL loss term and $\\beta$ have, if any, in what you observe (multiple matplotlib plots may be desirable here)?\n",
" * Consider Outliers\n",
" * Counsider Boundaries\n",
" * Consider Clusters\n",
"* Is T-SNE reliable? What happens if you change the parameters (don't worry about being particularly thorough). [This link](https://distill.pub/2016/misread-tsne/) may be helpful."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "u0_3QlEYteYk"
},
"source": [
"**YOUR ANSWER**\n",
"\n",
"Actually, when seeing the boxplot, some labels have quite huge outliers in either x or y, but not in both, so it looks quite good in scatterplot. We can observe that points in the 2D embedding are similar to their neighbours, so the clusters and boundaries are obviously.\n",
"\n",
"We also observe relatively smooth boundaries of points, which is the effect of the reconstruction loss and the KL loss. The KL loss make the network to learn more or less latent distributions, and the trade off can be infulenced by $\\beta$.\n",
"\n",
"The label 2 is really spetial for having two clusters, and it may be due to two different styles of writing.\n",
"\n",
"T-SNE is reliable, for it succeeded in reducing dimensions in the latent sapce, whilst it managed to distinguish one digit from an other in the 2-d space. The perplexity is a very important parameter, if it is too small we can get too many clusters, while if it is too big, we will find all the points are in the same cluster."
]
},
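  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick way to back up the claim about perplexity above is to rerun t-SNE on the same latent codes with a few different perplexity values and compare the scatter plots. This is only a sketch: it assumes a NumPy array `latents` of encoded test vectors and an array `labels` with the corresponding digits (placeholder names, not variables defined elsewhere in this notebook).\n",
    "\n",
    "```python\n",
    "from sklearn.manifold import TSNE\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# latents: (N, latent_dim) array of encoder outputs; labels: (N,) digit labels\n",
    "for perp in (5, 30, 100):\n",
    "    emb = TSNE(n_components=2, perplexity=perp, random_state=0).fit_transform(latents)\n",
    "    plt.figure(figsize=(4, 4))\n",
    "    plt.scatter(emb[:, 0], emb[:, 1], c=labels, cmap='tab10', s=3)\n",
    "    plt.title(f'perplexity = {perp}')\n",
    "plt.show()\n",
    "```"
   ]
  },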
{
"cell_type": "markdown",
"metadata": {
"id": "uCtbTLv4thEH"
},
"source": [
"## Part 1.3b: Interpolating in $z$ (4 Points)\n",
"Perform a linear interpolation in the latent space of the autoencoder by choosing any two digits from the test set. What do you observe regarding the transition from on digit to the other?\n"
]
},
{
"cell_type": "code",
"metadata": {
"id": "MVk7GUIxtgiF"
},
"source": [
"# CODE FOR PART 1.3b IN THIS CELL\n",
"\n",
"vae = torch.jit.load('./model/VAE_model.pth')\n",
"model.load_state_dict(vae.state_dict())\n",
"# model.eval()\n",
"test_data, test_labels = next(iter(loader_test))\n",
"\n",
"x1 = torch.FloatTensor(test_data[100]).reshape(1,1,28,28)\n",
"x2 = torch.FloatTensor(test_data[10]).reshape(1,1,28,28)\n",
"\n",
"with torch.no_grad():\n",
" x1 = x1.to(device)\n",
" x2 = x2.to(device)\n",
" mu1, var1 = model.encode(x1)\n",
" mu2, var2 = model.encode(x2)\n",
" z1 = model.reparametrize(mu1, var1)\n",
" z2 = model.reparametrize(mu2, var2)\n",
" a = np.linspace(0,1,num=11)\n",
" inter = []\n",
" for i in range(len(a)):\n",
" Z = a[i] * z1 + (1 - a[i]) * z2 \n",
" x_hat = model.decode(Z)\n",
" inter.append(x_hat)\n",
"\n",
"fig, axs = plt.subplots(1, 11, figsize=(20, 20))\n",
"for i in range(11):\n",
" y = inter[i].view(28,28).squeeze().squeeze().cpu().numpy()\n",
" axs[i].imshow(y, cmap='gray')\n",
" axs[i].axis('off')"
],
"execution_count": 59,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/plain": "<Figure size 1440x1440 with 11 Axes>",
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\r\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\r\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n<!-- Created with matplotlib (https://matplotlib.org/) -->\r\n<svg height=\"100.246154pt\" version=\"1.1\" viewBox=\"0 0 1130.4 100.246154\" width=\"1130.4pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\r\n <metadata>\r\n <rdf:RDF xmlns:cc=\"http://creativecommons.org/ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\r\n <cc:Work>\r\n <dc:type rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\"/>\r\n <dc:date>2021-02-23T12:16:48.771361</dc:date>\r\n <dc:format>image/svg+xml</dc:format>\r\n <dc:creator>\r\n <cc:Agent>\r\n <dc:title>Matplotlib v3.3.4, https://matplotlib.org/</dc:title>\r\n </cc:Agent>\r\n </dc:creator>\r\n </cc:Work>\r\n </rdf:RDF>\r\n </metadata>\r\n <defs>\r\n <style type=\"text/css\">*{stroke-linecap:butt;stroke-linejoin:round;}</style>\r\n </defs>\r\n <g id=\"figure_1\">\r\n <g id=\"patch_1\">\r\n <path d=\"M 0 100.246154 \r\nL 1130.4 100.246154 \r\nL 1130.4 0 \r\nL 0 0 \r\nz\r\n\" style=\"fill:none;\"/>\r\n </g>\r\n <g id=\"axes_1\">\r\n <g clip-path=\"url(#p35507d3f8f)\">\r\n <image height=\"86\" id=\"imagec03a18f11e\" transform=\"scale(1 -1)translate(0 -86)\" width=\"86\" x=\"7.2\" xlink:href=\"data:image/png;base64,\r\niVBORw0KGgoAAAANSUhEUgAAAFYAAABWCAYAAABVVmH3AAAFpklEQVR4nO2cOUheXRCGj3EjLrghKi4oxgWJiAtYuZsiEERBizRJIYiFtZ0I1raCRRBSKAghWmmhFm64obgRCBbGqLglROKWuOXvXt978aLff78xEuatHvUuh/GcuTNz5l4fY8wfo/K6nvztAfyrUsMKSQ0rJDWskNSwQlLDCkkNKyQ1rJDUsEJSwwpJDSskv4e4yZMnN/+/6OhocGlpKfjVq1fg2NhY8I8fP8BfvnwBDw8Pg2dmZsBHR0fg6+vr/z9ol9IZKyQ1rJB8jEDZMDw83PJzQ0MDuL6+HpycnAwOCAi487p//twM9eLiAryzswN+//49uLOzE3xwcAC+vLy8815upTNWSGpYIXnNFYSFhYFbW1stf3v79i04ODj45uY+PuCrq6tb2dfXF8zRhb+//63j+P37N3h5efnWMY2NjYF//fplOZ/djRvpjBWSGlZIalghufKx7COLiorA3d3dluPi4uLA7AM5k+rp6QEvLi6C4+PjwYmJieDi4mJwQkICmDM7P7+bxHJjYwPc1tYG7u/vt4yVfa4bf6szVkhqWCG5KsJwKFRYWAiOjIy0HMfFkNXVVXBzczN4bm4OfH5+Dn769CmYQ7Xe3l4wu4K6ujpwVVUVOCUlBdzS0gLmrM0YYyYnJ8Gc3XkqnbFCUsMKyZUrCAwMBPNT2p4VHR4egrkwMj8/D+blz5nX2dkZmCOKnz9/gr9+/Qr+9OkT+Nu3b+DGxkYwRxdNTU2WsXK2xrVgTyMEnbFCUsMKyZUr4ASBl4p9S4SfvLyN4hSM83VZHIVwTZVdB9+ro6MDnJaWBq6srASXlJRY7lFRUQH++PHjrfe4j3TGCkkNKyRXroCX/P7+Pti+bHiZOy0pvpbTE5gjByfxuewWOBrJysoCc4RgjDGvX78GDwwMgE9OTu68N0tnrJDUsEJy5Qo4l97a2gLbowKuHURERICdnv5O0cZ9xMdz5LCwsACenZ0Fc3OIMcY8e/YMzLvN6goeidSwQvJaVMA5OvdPGWOtKXD5bmlpCezUROHGLbB4KQ8ODoLLysosx4WEhIDT09PBu7u74PskCzpjhaSGFZIrV8BLkxMEe+U9KCgInJOTAx4aGrr1HHYxblwBN3jwdXhZ28caGhoKzszMBE9MTIDVFfxFqWGF5LWo4PPnz+C1tTXLcdnZ2WDO05OSksBcQuRdA17OzLwcnVwEj4+jDo4Qvn//bjnHqT9MdxAeidSwQvLayx288caBvzHGFBQUgNkt5ObmgrkFyNOdBZbT8fx7J5dijNV9nJ6e3nr+faQzVkhqWCF5zRXwU3d8fNzyt5qaGjAH4Pn5+eCRkREw1xqclqDTxqKT+HjeNbC/iHJ8fAze3Ny887pO0hkrJDWskLzmCvhpyo3Dxhizvr4O5kggLy8P/Pz5czAH7dxWdJ+kgMVRAZcDeQzcwWiMNTmxlz89kc5YIalhhSTiCriEaIwx7969A7e3t4N5N6G6uhrMuxHcG8D3uM+b3ewKeJOwvLwcbH/Pa2VlBby9vQ3WBOGRSA0rJJEPQdhbgUZHR8FciX/x4gX45cuXYI4Kurq6wFz55zzeqYbA/Qy1tbXgqKgosP0dhA8fPoC5cdnTj0rojBWSGlZIalghifhYe42Ta7XcThkTEwNOTU0Fsz/kr3BMTU2B2Tc6ZVgZGRlg7uLm7Kqvr88yVn4e2EMxT6QzVkhqWCGJfGzHLt4K4aXKL1K8efMGzE0dfC6HPNwEwr/n5evE09PTYM4EjbFuEXn6QgdLZ6yQ1LBCehBXwOItEv5AD2/TcBbGu7rc4MFbPFyz3dvbA3MTCbduckc3F1qMsWaN+iGIRyg1rJAe3BU4iYN87pni77qwG2FxKybv2Hpav/WmdMYKSQ0rpEfjCv416YwVkhpWSGpYIalhhaSGFZIaVkhqWCGpYYX0H2FFW+1urwvpAAAAAElFTkSuQmCC\" y=\"-7.046154\"/>\r\n </g>\r\n </g>\r\n <g id=\"axes_2\">\r\n <g 
clip-path=\"url(#p1e7caee913)\">\r\n <image height=\"86\" id=\"image492820f141\" transform=\"scale(1 -1)translate(0 -86)\" width=\"86\" x=\"110.215385\" xlink:href=\"data:image/png;base64,\r\niVBORw0KGgoAAAANSUhEUgAAAFYAAABWCAYAAABVVmH3AAAFlUlEQVR4nO2cyyt1bxTHt3u55JK7yK0QoWRAMjFRolAu+StMpAzMmJMBSqFcSpkob29JxECKYmCAXHK/5373m31/3306+32Pzl7obX1HH/Zzzt6WZ629nmetvT0Mw/gwVLbL87sv4F+VGlZIalghqWGFpIYVkhpWSGpYIalhhaSGFZIaVkhqWCF5f/UJ/fz8wKmpqeCqqipwcnIy+Pr6Gry/vw+emZkBr66ugh8eHsAfH9+3DaIzVkhqWCGJhAJvb/P
"image/png": "iVBORw0KGgoAAAANSUhEUgAABGoAAABkCAYAAADe8tZlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAr0klEQVR4nO2dedSVZdm+b76ySM1MU8gRGUQcUlHEUAlBUBFFcQanHMihNDGHtcyy1qq0UtO0sgGzHHNWTHEIBBFRFCcUcwIEhxzAIW3m98dvdX3H83z7ft3Mz7M5jr9O9rt5936f67mHvdd53le7BQsWJBERERERERERWf78z/J+AyIiIiIiIiIi8v/xixoRERERERERkYrgFzUiIiIiIiIiIhXBL2pERERERERERCqCX9SIiIiIiIiIiFQEv6gREREREREREakIH2/rh+3atbN391JgwYIF7Rb3d1ibpYO1qS7WprpYm+pibaqLtaku1qa6WJvqYm2qSyvVpl27xn8KH1+wYEFDXUXaqo2OGhERERERERGRiuAXNSIiIiIiIiIiFaHN6JOIiIiIiIgsPXKxDVk+sB4f+9jHQv/nP/8JXad4Td3J1eOTn/xk6H/9618N9b///e+l/O6WHjpqREREREREREQqgl/UiIiIiIiIiIhUBKNPIiIiDch1FtDivHxgPVZaaaWGz6HFmRZ1Yv2WPKzHZz7zmYbP+eCDD0LnbOl1tqhXCY4V1qNTp06h//nPf4Z+/fXXQ//1r39t+BzWTBadj3/8fz96denSJXTPnj1Dz5s3L/SMGTNCv/nmm6E//PDD0I6bRYdjpX379qEHDBgQulevXqFnzpwZ+oEHHgg9Z86c0KxNbh2Sj+Z//ud//SScx4444ojQHDfTp08Pfeutt4Z+6aWXQv/tb38LXYe9gI4aEREREREREZGK4Bc1IiIiIiIiIiIVoRbRp5zdmfZBngBNcrZNWtG0pS06vO4rr7xy6E984hMNn0N75j/+8Y/QtKI1Y12XxnCssAaf+9znQn/qU59q+H9ZD1rU33vvvdCtcor68oAWztVWWy109+7dGz5O6yzr8e6774Z+5ZVXQv/9738PbW0WDq4l6623XuhBgwaFXmONNUK/8cYbod96663Qr732WuhnnnkmNKMExgcWDnZ0oP18xIgRoVddddXQL7zwQuiXX3459Isvvhh66tSpoVkb15uPhmsMr/vBBx/cUPOaPv7446Gfe+650NOmTQv92GOPheacVgeL+vKGtenYsWPos846K/SXvvSl0G+//Xboe+65J/TkyZNDP/nkk6H/8pe/hHaNWTi4D+aaf8EFF4RmDGru3Lmhb7jhhtBjx44NPXv27NDcLzhWFg6u/xwf5557buhVVlklNNd2rv/cF/AzjSwcuc/8++67b+hRo0aF5hrDcXbXXXeFrvOY0FEjIiIiIiIiIlIR/KJGRERERERERKQiVCr6RMsST3feZpttQu++++6ht9hii9AbbLBB6E9/+tOhaZ3lqfbPPvts6DvuuCP0Qw89FJrWw5SK0ZA626gWBdamQ4cOofv27Rt6t912C73RRhuFpgU3Vxueon7fffeFHjNmTGha2hn/SGnFtuHSGsjrvuuuu4bu379/aNbvs5/9bGja2GnbfPjhh0PfcsstoSdOnBia9k+Ok5RWvLFCWBvOV7Rw9u7dOzTrwYgaa/POO++E5vi4+eabQz/11FOh7T7QGEYDd9hhh9CHH354aNYsF+dkNIdRtN/85jehx40b1/A5jEGtyOOkDK/p4MGDQx933HGhOdcx4tytW7fQvKaM19CiznWlbt0glgesDeNnxx9/fOjVV189NOMy3AvQ3s4YFGMIK/Kea1Fg/Jz12HPPPRs+n9FA7se4TjgmFp1cTJCxja233jo097WManJ//P7774fmvGdtFg7WhvPViSeeGHrdddcNzZjZpEmTQj///POh3WsteXhsA/dmrBnjmbfddlto1qzOY0VHjYiIiIiIiIhIRfCLGhERERERERGRirDco0/shMJTtXny9mGHHRZ6yy23bPh/aTPj47Tprr322qHZXYX29gcffDD0j3/848J7nTVrVugVIWrD68go2tChQ0MfdNBBoTt16hSaNjPWhnZDRjs23HDD0Iy39ezZMzQjatdcc03hvc6fPz903WxtiwKjF2uuuWZoWgMZGWDHmpx1ltCmyzHH+Memm24a+vbbbw/Nzh4pFS3VKwKszTrrrBOallpGbfj8efPmhaYNmtedtdl+++1Ds3sNfyc7FDA2ldKKZ8/ldeF8ddppp4X+whe+EJq2f0aWGMng+Gvfvn3oPn36NPy/7KLGGq/o3aAYeWF86Ywzzmj4OMcKowGMYXLe4xjafPPNQ7/55puhOR9yjV8R1pS2YG147b7+9a+HzsUEHn300dCMOOWuNeOfHGd1tq4vTVgbHhPAfTNjAuyqNX78+NBctzkmeK05f/Jx69EYXi+uB9yb8TMKx8rdd98dmuOG+7fc3tp6fDSsDY8G2HbbbUPz+rIejDLzSI3cfloWDtaGnTd79OgRmp8rbrrpptA8qiE3VuqGjhoRERERERERkYrgFzUiIiIiIiIiIhVhmUefaM9LqWj7o23zpJNOCt2lS5fQtMJOmTIl9OTJk0O/+uqrDV+PkYHu3buHHjhwYOgdd9wxNO2GKaU0evTo0LRUtUoMqlwbRpMYRTvmmGNCM9rBWAVPRacVjfEMWpx5snfnzp1DM5Z2wAEHhP7zn/9ceK8TJkwI3YqdVMq1YXeHPfbYIzS7CdH2zw5mf/rTn0LztHRaqBlFW2uttUKzYxS7SvHxc845p/BeaYOvs/0wR7k27Gx24IEHhu7Xr19o1u/pp58OzXgf73Fa1zfbbLOGjzMWst5664VmLO3ee+8tvNcPPvggtTLl2vB6HXnkkaG32mqr0LTdco1hVy2OJ0afWBtGeRnhZKSUnYgYN0ipddaVHOXacD044YQTQnft2jU0rwnt59dff31odhliZJdRN851HCu0U3ONL8fSWmVdyVGuDe/xU045JTTXf84l1113XWiOG8bV+BocK9wTUueiga1eizLl2nB9Pv3000NzT/X222+Hvvzyy0MzzsH9W24PxbnRaOD/pVwbHrdw6qmnhubRC4zF/va3vw3NtaetuUiao63aMJbOzz38DHjttdeG5p6N8WjHxJKBcxo7PbI2jGqy0xM72bVK10AdNSIiIiIiIiIiFcEvakREREREREREKsIyjz7ROplS0e587LHHhmbnjQ8//DA0Lc6MIr322muhacEt293+C2MhfA47Gu2zzz6F//PCCy+EZoSAcZ46U64NrbNf+cpXQjOKxq4lV199deirrroqNK3otAny9ah5svfZZ58deuONNw7NjgYpFaMIrFOrnMJers3nP//50IxwsPMGbea0bdKWTks0rxWjOeyWwrjTyJEjQ7P7UHncXHHFFaHZkaVVYlCMUaSU0gYbbBB6+PDhoWnnZKcA1mPMmDGhObZo25w4cWJoxjkZDWSHCdqsORZTS
umJJ54IzbFZZ5soKddmo402Cr333nuHZkSG1tkbbrghNKM2uQ5bjKtxbdtll11Cs6sdn3PPPfcU3ivHbyva3cu1YRyZ14hzEa3orM3UqVND8z5eaaWVQnM88XHOmXxPc+bMCV2OpdFS3SrzGCnXhvsxxqC5NjBGy3mM6zHvY46bXAdPxkj5OK9/OSLYivUg5b0AjwxgxxruaxmxzHWsyUUtGT9jncrv47+syN2gytekd+/eodktjfcoo+icxxhFy9WGNc591lnRapCD925KKW233XahGRvnHHXLLbeEfvbZZ0PzMylr6bVeNMq1YVScezZGk7lv5p6tFbsD6qgREREREREREakIflEjIiIiIiIiIlIR/KJGRERERERERKQiLPMzaphpTqmYd2ZrbObM2LL20ksvDc023Hx+Ls/JvNobb7wRmufe8D3wHJCUUtpvv/1CszVYLq9YB5hrZRY5pZQGDRoUmjlo5sNvvfXW0JdddllonoXB2jRzfZh151lAPBuHGcaUiueiXHLJJ
},
"metadata": {
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "gdk6yyrittNx"
},
"source": [
"### Discussion\n",
"What did you observe in the interpolation? Is this what you expected?\n",
"* Can you relate the interpolation to your T-SNE visualization"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "ZF2jUHWHtt3V"
},
"source": [
"**YOUR ANSWER**\n",
"\n",
"From the above picture, we can see the change from 0 to 6. On the most left picture, we can see a clear 0, and on the right we can see a clear 6, while in the middle it looks like both 0 and 6.\n",
"\n",
"I guess, maybe the interpolation is just like one point have a linear movement from one cluster to another."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "EG68ntJ2qfIC"
},
"source": [
"# Part 2 - Deep Convolutional GAN\n",
"\n",
"In this task, your main objective is to train a DCGAN (https://arxiv.org/abs/1511.06434) on the CIFAR-10 dataset. You should experiment with different architectures, tricks for stability in training (such as using different activation functions, batch normalization, different values for the hyper-parameters, etc.). In the end, you should provide us with: \n",
"\n",
"- your best trained model (which we will be able to run), \n",
"- some generations for the fixed latent vectors $\\mathbf{z}\\sim \\mathcal{N}\\left(\\mathbf{0}, \\mathbf{I}\\right)$ we have provided you with (train for a number of epochs and make sure there is no mode collapse), \n",
"- plots with the losses for the discriminator $D$ and the generator $G$ as the training progresses and explain whether your produced plots are theoretically sensible and why this is (or not) the case. \n",
"- a discussion on whether you noticed any mode collapse, where this behaviour may be attributed to, and explanations of what you did in order to cope with mode collapse. \n",
"\n",
"## Part 2.1 (30 points)\n",
"**Your Task**: \n",
"\n",
"a. Implement the DCGAN architecture. \n",
"\n",
"b. Define a loss and implement the Training Loop\n",
"\n",
"c. Visualize images sampled from your best model's generator (\"Extension\" Assessed on quality)\n",
"\n",
"d. Discuss the experimentations which led to your final architecture. You can plot losses or generated results by other architectures that you tested to back your arguments (but this is not necessary to get full marks).\n",
"\n",
"\n",
"_Clarification: You should not be worrying too much about getting an \"optimal\" performance on your trained GAN. We want you to demonstrate to us that you experimented with different types of DCGAN variations, report what difficulties transpired throughout the training process, etc. In other words, if we see that you provided us with a running implementation, that you detail different experimentations that you did before providing us with your best one, and that you have grapsed the concepts, you can still get good marks. The attached model does not have to be perfect, and the extension marks for performance are only worth 10 points._"
]
},
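  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a reference point for judging whether the loss plots are theoretically sensible, recall the standard GAN minimax objective the DCGAN is trained on:\n",
    "\n",
    "$$\\min_G \\max_D \\; \\mathbb{E}_{x\\sim p_{\\text{data}}}\\left[\\log D(x)\\right] + \\mathbb{E}_{z\\sim \\mathcal{N}(0, I)}\\left[\\log\\left(1 - D(G(z))\\right)\\right].$$\n",
    "\n",
    "At the idealised equilibrium the discriminator outputs $D(\\cdot) \\approx 0.5$ everywhere, so for the plain adversarial part (ignoring any auxiliary terms added later) the discriminator's BCE loss over a real and a fake batch settles around $2\\log 2 \\approx 1.39$, and the non-saturating generator loss $-\\log D(G(z))$ around $\\log 2 \\approx 0.69$. In practice the curves oscillate around values of this order rather than converging smoothly."
   ]
  },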
{
"cell_type": "code",
"metadata": {
"id": "uFEt7wGXP_aE",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "3735a195-7da1-4091-97ac-53bcc099f8c4"
},
"source": [
"import os\n",
"import numpy as np\n",
"import torch\n",
"import torch.nn as nn\n",
"from torch.utils.data import DataLoader\n",
"from torch.utils.data import sampler\n",
"from torchvision import datasets, transforms\n",
"from torchvision.utils import save_image, make_grid\n",
"from torch.optim.lr_scheduler import StepLR, MultiStepLR\n",
"import torch.nn.functional as F\n",
"import matplotlib.pyplot as plt\n",
"\n",
"\n",
"def denorm(x, channels=None, w=None ,h=None, resize = False):\n",
" x = 0.5 * (x + 1)\n",
" x = x.clamp(0, 1)\n",
" if resize:\n",
" if channels is None or w is None or h is None:\n",
" print('Number of channels, width and height must be provided for resize.')\n",
" x = x.view(x.size(0), channels, w, h)\n",
" return x\n",
"\n",
"def show(img):\n",
" npimg = img.cpu().numpy()\n",
" plt.imshow(np.transpose(npimg, (1,2,0)))\n",
"\n",
"if not os.path.exists('/content/drive/MyDrive/icl_dl_cw2/GAN2'):\n",
" os.makedirs('/content/drive/MyDrive/icl_dl_cw2/GAN2')\n",
"\n",
"GPU = True # Choose whether to use GPU\n",
"if GPU:\n",
" device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"else:\n",
" device = torch.device(\"cpu\")\n",
"print(f'Using {device}')\n",
"\n",
"# We set a random seed to ensure that your results are reproducible.\n",
"if torch.cuda.is_available():\n",
" torch.backends.cudnn.deterministic = True\n",
"torch.manual_seed(0)"
],
"execution_count": 38,
"outputs": [
{
"output_type": "stream",
"text": [
"Using cuda\n"
],
"name": "stdout"
},
{
"output_type": "execute_result",
"data": {
"text/plain": [
"<torch._C.Generator at 0x7ff38595a540>"
]
},
"metadata": {
"tags": []
},
"execution_count": 38
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "VosOpcpfGvWO"
},
"source": [
"### Part 2.1a: Implement DCGAN (8 Points)\n",
"Fill in the missing parts in the cells below in order to complete the Generator and Discriminator classes. You will need to define:\n",
"\n",
"- The hyperparameters\n",
"- The constructors\n",
"- `decode`\n",
"- `discriminator`\n",
"\n",
"Recomendations for experimentation:\n",
"- use the architecture that you implemented for the Autoencoder of Part 1 (encoder as discriminator, decoder as generator).\n",
"- use the architecture desribed in the DCGAN paper (https://arxiv.org/abs/1511.06434).\n",
"\n",
"Some general reccomendations:\n",
"- add several convolutional layers (3-4).\n",
"- accelerate training with batch normalization after every convolutional layer.\n",
"- use the appropriate activation functions. \n",
"- Generator module: the upsampling can be done with various methods, such as nearest neighbor upsampling (`torch.nn.Upsample`) or transposed convolutions(`torch.nn.ConvTranspose2d`). \n",
"- Discriminator module: Experiment with batch normalization (`torch.nn.BatchNorm2d`) and leaky relu (`torch.nn.LeakyReLu`) units after each convolutional layer.\n",
"\n",
"Try to follow the common practices for CNNs (e.g small receptive fields, max pooling, RELU activations), in order to narrow down your possible choices.\n",
"\n",
"<font color=\"red\">**Your model should not have more than 25 Million Parameters**</font>\n",
"\n",
"The number of epochs that will be needed in order to train the network will vary depending on your choices. As an advice, we recommend that while experimenting you should allow around 20 epochs and if the loss doesn't sufficiently drop, restart the training with a more powerful architecture. You don't need to train the network to an extreme if you don't have the time."
]
},
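  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick way to sanity-check spatial sizes when stacking transposed convolutions: for `ConvTranspose2d` (with no dilation or output padding) the output size is $(\\text{in} - 1)\\cdot \\text{stride} - 2\\cdot \\text{padding} + \\text{kernel}$, so a $4\\times4$ kernel with stride 2 and padding 1 doubles the resolution, while the same kernel with stride 1 and padding 0 maps a $1\\times1$ latent to $4\\times4$. The check below is a standalone sketch, not part of the required model:\n",
    "\n",
    "```python\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "\n",
    "# kernel 4, stride 1, padding 0: 1x1 -> 4x4\n",
    "print(nn.ConvTranspose2d(100, 512, 4, 1, 0)(torch.zeros(1, 100, 1, 1)).shape)\n",
    "# kernel 4, stride 2, padding 1: 4x4 -> 8x8 (each such layer doubles H and W)\n",
    "print(nn.ConvTranspose2d(512, 256, 4, 2, 1)(torch.zeros(1, 512, 4, 4)).shape)\n",
    "```"
   ]
  },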
{
"cell_type": "markdown",
"metadata": {
"id": "pOi_Q_jleQJq"
},
"source": [
"#### Data loading"
]
},
{
"cell_type": "code",
"metadata": {
"id": "__ENlW2aeQJr",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "a75d07a3-bc07-40b7-f1a0-d05a1947c020"
},
"source": [
"batch_size = 128 # change that\n",
"\n",
"transform = transforms.Compose([\n",
" transforms.Resize(64),\n",
" transforms.RandomHorizontalFlip(p=0.5), \n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),), \n",
"])\n",
"\n",
"transform_test = transforms.Compose([\n",
" transforms.Resize(64), \n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),), \n",
"])\n",
"\n",
"data_dir = './datasets'\n",
"\n",
"cifar10_train = datasets.CIFAR10(data_dir, train=True, download=True, transform=transform)\n",
"cifar10_test = datasets.CIFAR10(data_dir, train=False, download=True, transform=transform_test)\n",
"loader_train = DataLoader(cifar10_train, batch_size=batch_size)\n",
"loader_test = DataLoader(cifar10_test, batch_size=batch_size)"
],
"execution_count": 15,
"outputs": [
{
"output_type": "stream",
"text": [
"Files already downloaded and verified\n",
"Files already downloaded and verified\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "-TmrRudFRhOB"
},
"source": [
"We'll visualize a subset of the test set: "
]
},
{
"cell_type": "code",
"metadata": {
"id": "CY2ka775Rfxm",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 846
},
"outputId": "c090672a-1bd3-4381-d40a-099638b9c312"
},
"source": [
"samples, _ = next(iter(loader_test))\n",
"\n",
"samples = samples.cpu()\n",
"samples = make_grid(denorm(samples), nrow=8, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
"plt.figure(figsize = (15,15))\n",
"plt.axis('off')\n",
"show(samples)"
],
"execution_count": 16,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAaYAAAM9CAYAAADXT9eqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9Ta9kSZKe95i5+zknIu5HflRXd8/0gCAhiRRBCBQoUSQgERKohRYCtBEg/Qr+qAEJ/gsCXFAbAlyIlCiAkkj2cKanqzIr835FxDnubqaFeURmVfd0j0gRmkV6deB23sh74/iX2WuvvWYp7s6X8WV8GV/Gl/Fl/FkZ+v/3A3wZX8aX8WV8GV/G5+OLY/oyvowv48v4Mv5MjS+O6cv4Mr6ML+PL+DM1vjimL+PL+DK+jC/jz9T44pi+jC/jy/gyvow/UyP/pjdF5Itk78v4Mr6ML+PL+Pcy3F1+3fd/o2MC+PM/3vOXfnYDgAqICIiAO+aGuwGCqFw+CBwQRwAEcI0XIGKojp+5vu+YO+6OA7gAgiOYg/3APV7+6Gbx3nVujuDjZ5zmYOZs3fkn/+KRv/k3/yZ/7a/9Nf5tJfIiv3YNv/9Qn/9fufzZkV9Zf49ljMVCkF/7XJ9/prvz9PTE7//+7/NX/vO/xe/9hb+Eu9FqpdUNM2OaJpbdjqkUUkooICqoCCqgEusuOLgh4qSkY2/j/dgTqK0iQEoZVUVEiLdlPPfl9elZ3R3rhuOoKKqx7+aGmyHI9Xd9++5b/v7f+7v81f/0P+GnP/0ZadqTNJNVSSnhCM2M3ipufaxnrFfMRUmin575B+sby+njf37dm5jH5fx9+sHreRzzuuzHZQ9UlaQJQQEdP2v0euSP//iP+Af/8B/x9Y9fs+wXJCVq62x1o9ZKbx0cylzQKSO5cF10wJpRj2fW4wlwylKYdxM5Kwq4Ke6xj2VWpqUwLRlRhc+e0wzaZmzrRmsdScrtzY79MtNb4+HjE8fTiWmeyDmDOeu6YQaSMjllTi8n3v3hO/7bv/LnWObM0/nMh+cTp7VScuZmN3MzTRRALPbV3ZGkiCoi0M2wbiCCatzl1o3aO90dQyAlVAvdYWudddvoraPmZNWLhcCBjmHu4/fHOXOL7copUXJGRDlvG7V3bNihx/OZb59e+G/uC7cZlgRzgiKg4mSFnKAkEHFELE5KXBLMoRv07nQDG7bpct7MiXkZmIEBaMbLAb+5xVKmPXxkqxsrwmYCvaO9cq5CJeEpQ840oFlHeiePr45jhC39tjrfNOc/+I//w3Gvxi10H2cgTqaIoJIQuRBijoiDGqoxx94NM3ADM8FdwQVzx7x/su1u4zMuVz3sq1t8rrthNnyBxZ/d/Pp3GT97uf+xb862bbTafsXeXcZvdUz/0c9u+B//y98FjKSEsRMNg9g6Zg1RUE1joy4X2sZGA17ACoKjqZJSR5FxJ2MBujndHHPBXEESZkpzodv17g1HGPe5907v4C5gIPQwFB7OaDPYuvN8bvyTf/HI3/7bf5u/83f+Dmb226b9a8dvdEzwPTvtcvGXnxzP9f0x70tA6pf34Vec0w8/8w/+4A/4/d//ff6zv/Xf8V//9/8z3hun4wvHlydardze3PLm7Vtubm6YSxl7JhRVkipZhSwSF7BXRIxlSpSsJAXVcBxuxukURnKeZlLOZE2oahgaN7q1mMNwVmEYnbptuMdZKaUA0FrFWkNVKWVCVfln//R/5e//vb/L3/gb/wV//W/8V0w3XzOXPXMuLPNMRzhvlfPpCWsbsck+Lp5QNDOlTBZFUjiJ61a4Y+MiXDbnAnxU9WrgLs8di32ZSzhW3MOwipJzouRCTgWVBCRICfdGPX7DP/7H/wv/4B/+I373937Em6/fIvPE6bzy+PzE8/ML27mSVDjc70k3e5hmJCkkDRBwqjz/8jsevn2PJLh5c+DNV3csc0ZRrCnWQZNwc79w92bH/nZGUx5WAjQleoPTc+Xx4Znz+UyeZn7vd7/i67f3rMcz//f/+a/5419+y839Lbtlxjs8P77QUfK8MOWZ9794z7s/fMf/8Nf/Iq9vJ/7wuw/8619+4ONxZb/M/OTVLV/f7NmZoK1jreHiSFZSzqBQa6VuLUBRSpjD1hrHrXHuneYCeSJNe9YuPJ1XHp6eqecN7c6ihTTAruFU71TrSEqAXI1fEmU3zRx2O1SVD88vnLaN2g1w/tW7D3z79ML/9OOJ31uE29m5y8KSnIwxJWeZhN0EORlOx7FIcqjQurM2WDdYm9Ds4pji2ao75+ocq7M16A6eMn1/R/3qJ2zzzPaHxvH4wjPKY1dk28hVeDhlnljY5plaMieErW7MrXKoK6lumHcajrjzz46db5rzl//qXyaPe3U952YBIgmwlnNBpQzwa4g0JDdy7og2Wuu06rSm9CZYy5gJ5kbrjWaVbg23i5MPoGfm9O5Yd7qFg+u90Vun9Y71jveODe8lxO/srdPH+2bG08Pzv5tjcoQ2MMslGhLCuwb6DfSgw+6qEJ7XYhKIIB4vVQZyd8QNsfi94jYcmtJJdFFMMl2Ubkqzy0F0xDsqQlIQF5IEonAZ7h8bGMPD4Msn4y4iVwT/bzN+u2O6es/PUFegRT5HWgOai/g19hA0UOev+YzL9z5/P2dlnjJ08JbpW4ZeSckpWZhKvNLYm6ROTk5JStF4nu4+UKOQ03BMF4QrQkoybHM8p6qTU7zfeyArwVAUTQkRobvQRmSV0oh8zFCPs5RFKSkhIyqKkXAKLju6HuhpwnSJhdKV1lfq1sAD8eklqFPHFHJSNAGi2IiszJ3WOriPOSkOmBkiYbQua5vGs4Nj9mk/hDD28zQhupDKTMoDsYuAKtDwOuNpAkCnTN5N6LLQRZjOZ1SVecnMu4mbN7cwTdSLcZNgBTCj9w5upJLZ7SYONzPzNEEX1rPjNHSCsk9I1kDxrWHdYn8K4BFZeXe8OwllKRP7peB1w61zPm2IHGlrJ4uCQZkmUplxCwcYc5mQlFARXt8deHV3w343cTMXdqqkDdBEmhKpJAKgh51wVTyNjbKOmFFc2CUFczYzrHekbUh3pFUSnSZxc/uAazqid3VQF/rW6D0ASMmJUjIlp3BiIkw5YV5IKQxjGWfsz9/Dn9vBLsOU473e+8AiglvcBxsA2RFcoHVYm3Nu8bVe8IpeIjnnXIVzIxwT0Kqx1ReeTr/glAQ5PeFto3egj2jPhZQXPO15KoVvW+XYVubeeePGjTt52DZwxBgnMmzKJWq8RP+XyOn6nw+biIB0zBu0DaSTxBB1NDnaDRNw6XGm/cJVEYCIsNVhsfxTFOWXsMnAOu4dGa+w0xFNCI56zEMJUIjqFXT8SeNP4ZjG4ngLJDk2TVwRC2Qhg9W4GFlwXEFMMBKxpBFlxXw8fpfbddJhnBKdzGqZc1dqU7au1G2E7e4UVbJCUUiaBxvSQexKLwlhM9Qv9NT353TZ1P/Ph8iYz6Dp5OKFBBtrKaJxVj77eLn++K8+1w8d1eXPSYJ+Q
ISWhZKgq5PVKcmYEkwZFAcz1J1EpiQhq+C9gYxDREc9TLHgiIShThKh/YUiHeQqbk6rK9t6JiVhKuUaaaGX9RaSxsv4dD7CSYbjukIEH79ZlC6ZLgXThKqQZSLv9vEMfaVYRawPhymIZrxMgdbFcA/H5A6qFTNHVMg5xef496NlFbk6VbtSEfH8OpznNM1MZSbniaSAOK6OJ8U94SnhgzaRaSLtZvIy4w7TPDFNGU2Zw+2O/f2OSsLqML2q0BzvHe8RgZaSKHMm5URKFyS70XtDbDAN7tRzpa5hqFWV+SDkNIEQVE0zVISpZOapsCXBzajnikoh60RZMstuQecF18z6slHXGouTMpozyzzz42Vhmgo5Cck7sjW6hzHTlNEyoXTcgr4jJRhnxXo4JnEo7rgEIDI3xMORnWmoNaw3egdhROcEQNKkJHNab5jEfEvOTDlTNO6de5yvrHGO4ZMB/Mli/GQO+o7krGa0bpgJawvGBXd6N2oPx+EOzYS1w9ng3Ij0gAc97hoR0tbjv
"text/plain": [
"<Figure size 1080x1080 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "XSkveP0mZ-Pd"
},
"source": [
"#### Model Definition\n",
"Define hyperparameters and the model"
]
},
{
"cell_type": "code",
"metadata": {
"id": "FBoxMTihZ-Pd"
},
"source": [
"# *CODE FOR PART 2.1 IN THIS CELL*\n",
"\n",
"# Choose the number of epochs, the learning rate\n",
"# and the size of the Generator's input noise vetor.\n",
"\n",
"num_epochs = 100\n",
"learning_rate = 0.0002\n",
"latent_vector_size = 100\n",
"\n",
"# Other hyperparams\n",
"num_gen_features = 64\n",
"num_disc_features = 64"
],
"execution_count": 44,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "ATTUAhCDZ-Pg"
},
"source": [
"# *CODE FOR PART 2.1 IN THIS CELL*\n",
"\n",
"\n",
"class Generator(nn.Module):\n",
" def __init__(self):\n",
" super(Generator, self).__init__()\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" #input 100*1*1\n",
" self.layer1 = nn.Sequential(nn.ConvTranspose2d(latent_vector_size,num_gen_features*8,4,1,0,bias = False),\n",
" nn.BatchNorm2d(num_gen_features*8),\n",
" nn.ReLU(True))\n",
"\n",
" #input 512*4*4\n",
" self.layer2 = nn.Sequential(nn.ConvTranspose2d(num_gen_features*8,num_gen_features*4,4,2,1,bias = False),\n",
" nn.BatchNorm2d(num_gen_features*4),\n",
" nn.ReLU(True))\n",
" #input 256*8*8\n",
" self.layer3 = nn.Sequential(nn.ConvTranspose2d(num_gen_features*4,num_gen_features*2,4,2,1,bias = False),\n",
" nn.BatchNorm2d(num_gen_features*2),\n",
" nn.ReLU(True))\n",
" #input 128*16*16\n",
" self.layer4 = nn.Sequential(nn.ConvTranspose2d(num_gen_features*2,num_gen_features,4,2,1,bias = False),\n",
" nn.BatchNorm2d(64),\n",
" nn.ReLU(True))\n",
" #input 64*32*32\n",
" self.layer5 = nn.Sequential(nn.ConvTranspose2d(num_gen_features,3,4,2,1,bias = False),\n",
" nn.Tanh())\n",
" #output 3*64*64\n",
" \n",
" self.embedding = nn.Embedding(10, latent_vector_size)\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
"\n",
"\n",
" def forward(self, z, label):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" label_embedding = self.embedding(label)\n",
" z = z.view(-1, latent_vector_size)\n",
" out = torch.mul(z, label_embedding)\n",
" out = out.view(-1, latent_vector_size, 1, 1)\n",
" out = self.layer1(out)\n",
" out = self.layer2(out)\n",
" out = self.layer3(out)\n",
" out = self.layer4(out)\n",
" out = self.layer5(out)\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" #######################################################################\n",
" return out\n",
"\n",
"\n",
"class Discriminator(nn.Module):\n",
" def __init__(self):\n",
" super(Discriminator, self).__init__()\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" #input 3*64*64\n",
" self.layer1 = nn.Sequential(nn.Conv2d(3,num_disc_features,4,2,1,bias = False),\n",
" nn.BatchNorm2d(num_disc_features),\n",
" nn.LeakyReLU(0.2,True),\n",
" nn.Dropout2d(0.5))\n",
" \n",
" #input 64*32*32\n",
" self.layer2 = nn.Sequential(nn.Conv2d(num_disc_features,num_disc_features*2,4,2,1,bias = False),\n",
" nn.BatchNorm2d(num_disc_features*2),\n",
" nn.LeakyReLU(0.2,True),\n",
" nn.Dropout2d(0.5))\n",
" #input 128*16*16\n",
" self.layer3 = nn.Sequential(nn.Conv2d(num_disc_features*2,num_disc_features*4,4,2,1,bias = False),\n",
" nn.BatchNorm2d(num_disc_features*4),\n",
" nn.LeakyReLU(0.2,True),\n",
" nn.Dropout2d(0.5))\n",
" #input 256*8*8\n",
" self.layer4 = nn.Sequential(nn.Conv2d(num_disc_features*4,num_disc_features*8,4,2,1,bias = False),\n",
" nn.BatchNorm2d(num_disc_features*8),\n",
" nn.LeakyReLU(0.2,True))\n",
" #input 512*4*4\n",
" self.out_layer = nn.Sequential(nn.Conv2d(num_disc_features*8,1,4,1,0,bias = False),\n",
" nn.Sigmoid())\n",
" \n",
" self.label_layer = nn.Sequential(nn.Conv2d(num_disc_features*8,11,4,1,0,bias = False),\n",
" nn.LogSoftmax(dim = 1))\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" def forward(self, x):\n",
" #######################################################################\n",
" # ** START OF YOUR CODE **\n",
" #######################################################################\n",
" x = self.layer1(x)\n",
" x = self.layer2(x)\n",
" x = self.layer3(x)\n",
" x = self.layer4(x)\n",
" out = self.out_layer(x)\n",
" label = self.label_layer(x)\n",
" \n",
" out = out.view(-1)\n",
" label = label.view(-1,11)\n",
" #######################################################################\n",
" # ** END OF YOUR CODE **\n",
" ####################################################################### \n",
" \n",
" return out, label\n"
],
"execution_count": 45,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "o8YDyYf8Z-Pi"
},
"source": [
"<h2> Initialize Model and print number of parameters </h2>"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "Xh3NpfD_Z-Pj"
},
"source": [
"You can use method `weights_init` to initialize the weights of the Generator and Discriminator networks. Otherwise, implement your own initialization, or do not use at all. You will not be penalized for not using initialization."
]
},
{
"cell_type": "code",
"metadata": {
"id": "JAVpgpmUZ-Pk"
},
"source": [
"# custom weights initialization called on netG and netD\n",
"def weights_init(m):\n",
" classname = m.__class__.__name__\n",
" if classname.find('Conv') != -1:\n",
" m.weight.data.normal_(0.0, 0.02)\n",
" elif classname.find('BatchNorm') != -1:\n",
" m.weight.data.normal_(1.0, 0.02)\n",
" m.bias.data.fill_(0)"
],
"execution_count": 46,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "Ew-OdvNJZ-Pm",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "6e4ae1a9-f288-4b7b-91e6-90d56cb92fe1"
},
"source": [
"use_weights_init = True\n",
"\n",
"model_G = Generator().to(device)\n",
"if use_weights_init:\n",
" model_G.apply(weights_init)\n",
"params_G = sum(p.numel() for p in model_G.parameters() if p.requires_grad)\n",
"print(\"Total number of parameters in Generator is: {}\".format(params_G))\n",
"print(model_G)\n",
"print('\\n')\n",
"\n",
"model_D = Discriminator().to(device)\n",
"if use_weights_init:\n",
" model_D.apply(weights_init)\n",
"params_D = sum(p.numel() for p in model_D.parameters() if p.requires_grad)\n",
"print(\"Total number of parameters in Discriminator is: {}\".format(params_D))\n",
"print(model_D)\n",
"print('\\n')\n",
"\n",
"print(\"Total number of parameters is: {}\".format(params_G + params_D))"
],
"execution_count": 47,
"outputs": [
{
"output_type": "stream",
"text": [
"Total number of parameters in Generator is: 3577704\n",
"Generator(\n",
" (layer1): Sequential(\n",
" (0): ConvTranspose2d(100, 512, kernel_size=(4, 4), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU(inplace=True)\n",
" )\n",
" (layer2): Sequential(\n",
" (0): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU(inplace=True)\n",
" )\n",
" (layer3): Sequential(\n",
" (0): ConvTranspose2d(256, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU(inplace=True)\n",
" )\n",
" (layer4): Sequential(\n",
" (0): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU(inplace=True)\n",
" )\n",
" (layer5): Sequential(\n",
" (0): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): Tanh()\n",
" )\n",
" (embedding): Embedding(10, 100)\n",
")\n",
"\n",
"\n",
"Total number of parameters in Discriminator is: 2855808\n",
"Discriminator(\n",
" (layer1): Sequential(\n",
" (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): LeakyReLU(negative_slope=0.2, inplace=True)\n",
" (3): Dropout2d(p=0.5, inplace=False)\n",
" )\n",
" (layer2): Sequential(\n",
" (0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): LeakyReLU(negative_slope=0.2, inplace=True)\n",
" (3): Dropout2d(p=0.5, inplace=False)\n",
" )\n",
" (layer3): Sequential(\n",
" (0): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): LeakyReLU(negative_slope=0.2, inplace=True)\n",
" (3): Dropout2d(p=0.5, inplace=False)\n",
" )\n",
" (layer4): Sequential(\n",
" (0): Conv2d(256, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): LeakyReLU(negative_slope=0.2, inplace=True)\n",
" )\n",
" (out_layer): Sequential(\n",
" (0): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), bias=False)\n",
" (1): Sigmoid()\n",
" )\n",
" (label_layer): Sequential(\n",
" (0): Conv2d(512, 11, kernel_size=(4, 4), stride=(1, 1), bias=False)\n",
" (1): LogSoftmax(dim=1)\n",
" )\n",
")\n",
"\n",
"\n",
"Total number of parameters is: 6433512\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "by_TNUPXJamb"
},
"source": [
"### Part 2.1b: Training the Model (12 Points)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "00wgs1VNZ-Pp"
},
"source": [
"#### Defining a Loss"
]
},
{
"cell_type": "code",
"metadata": {
"id": "gPlxaL_cZ-Pq"
},
"source": [
"criterion = nn.BCELoss()\n",
"def loss_function(out, label):\n",
" loss = criterion(out, label)\n",
" return loss"
],
"execution_count": 48,
"outputs": []
},
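  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The BCE criterion above only covers the real/fake head of the discriminator. In the training loop below it is combined with an auxiliary classification term (`F.nll_loss` on the 11-way class head, where class 10 stands for \"fake\"), giving AC-GAN-style conditioning on the CIFAR-10 labels. Roughly, per batch:\n",
    "\n",
    "$$\\mathcal{L}_D = \\mathrm{BCE}(D_{adv}(x),\\, y_{real}) + \\mathrm{NLL}(D_{cls}(x),\\, c) + \\mathrm{BCE}(D_{adv}(G(z)),\\, y_{fake}) + \\mathrm{NLL}(D_{cls}(G(z)),\\, 10)$$\n",
    "\n",
    "$$\\mathcal{L}_G = \\mathrm{BCE}(D_{adv}(G(z)),\\, 1) + \\mathrm{NLL}(D_{cls}(G(z)),\\, c)$$\n",
    "\n",
    "where $c$ is the class label used to condition the generator and $y_{real}, y_{fake}$ are the smoothed targets defined a few cells below."
   ]
  },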
{
"cell_type": "markdown",
"metadata": {
"id": "GrgmhlSXZ-Ps"
},
"source": [
"<h3>Choose and initialize optimizers</h3>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "pFM8iI24Z-Pt"
},
"source": [
"# setup optimizer\n",
"# You are free to add a scheduler or change the optimizer if you want. We chose one for you for simplicity.\n",
"beta1 = 0.5\n",
"optimizerD = torch.optim.Adam(model_D.parameters(), lr=learning_rate, betas=(beta1, 0.999))\n",
"optimizerG = torch.optim.Adam(model_G.parameters(), lr=learning_rate, betas=(beta1, 0.999))\n",
"schedD = torch.optim.lr_scheduler.MultiStepLR(optimizerD, milestones=[50], gamma= 0.1)\n",
"schedG = torch.optim.lr_scheduler.MultiStepLR(optimizerG, milestones=[50, 75], gamma= 0.1)"
],
"execution_count": 49,
"outputs": []
},
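  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The Adam settings above (learning rate 0.0002, $\\beta_1 = 0.5$) follow the recommendation in the DCGAN paper. The `MultiStepLR` schedulers additionally multiply the learning rates by `gamma = 0.1` at the listed milestones, so for example the discriminator's learning rate drops from $2\\times10^{-4}$ to $2\\times10^{-5}$ after epoch 50."
   ]
  },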
{
"cell_type": "markdown",
"metadata": {
"id": "qZ311RPlZ-Pv"
},
"source": [
"<h3> Define fixed input vectors to monitor training and mode collapse. </h3>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "EGB_9A1UZ-Pw",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "39f99938-2190-4a28-ae1f-b44408824d4b"
},
"source": [
"fixed_noise = torch.randn(batch_size, latent_vector_size, 1, 1, device=device)\n",
"fixed_labels = torch.randint(0,10,(batch_size,),dtype = torch.long,device = device)\n",
"print(fixed_noise.size())\n",
"print(batch_size)\n",
"real_labels = 0.7 + 0.5 * torch.rand(10, device = device)\n",
"fake_labels = 0.3 * torch.rand(10, device = device)"
],
"execution_count": 50,
"outputs": [
{
"output_type": "stream",
"text": [
"torch.Size([80, 100, 1, 1])\n",
"80\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "gD9S_3yZZ-Py"
},
"source": [
"#### Training Loop"
]
},
{
"cell_type": "code",
"metadata": {
"id": "w5vD--X6Z-Pz",
"colab": {
"base_uri": "https://localhost:8080/"
},
"tags": [],
"outputId": "4d40b520-9d18-4351-b572-e809db57196e"
},
"source": [
"train_losses_G = []\n",
"train_losses_D = []\n",
"\n",
"for epoch in range(num_epochs):\n",
" train_loss_D = 0\n",
" train_loss_G = 0\n",
" for i, data in enumerate(loader_train, 0):\n",
" ############################\n",
" # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))\n",
" ###########################device\n",
" # train with real\n",
"\n",
" model_D.zero_grad()\n",
" real_cpu = data[0].to(device)\n",
" labels = data[1].to(device)\n",
" real_label = real_labels[i % 10]\n",
" fake_label = fake_labels[i % 10]\n",
" batch_size = real_cpu.size(0)\n",
" fake_class_labels = 10 * torch.ones((batch_size,), dtype = torch.long, device = device)\n",
"\n",
" if i % 30 == 0:\n",
" real_label, fake_label = fake_label, real_label\n",
" label = torch.full((batch_size,), real_label, device=device)\n",
"\n",
"\n",
" output, out_labels = model_D(real_cpu)\n",
" errD_real = loss_function(output, label) + F.nll_loss(out_labels, labels)\n",
" errD_real.backward()\n",
" D_x = output.mean().item()\n",
"\n",
" # train with fake\n",
" noise = torch.randn(batch_size, latent_vector_size, 1, 1, device=device)\n",
" sample_labels = torch.randint(0,10,(batch_size,), dtype = torch.long, device = device)\n",
"\n",
" fake = model_G(noise, sample_labels)\n",
"\n",
" label.fill_(fake_label)\n",
"\n",
" output, out_labels = model_D(fake.detach())\n",
" errD_fake = loss_function(output, label) + F.nll_loss(out_labels, fake_class_labels)\n",
" errD_fake.backward()\n",
"\n",
" D_G_z1 = output.mean().item()\n",
" errD = errD_real + errD_fake\n",
" train_loss_D += errD.item()\n",
" optimizerD.step()\n",
"\n",
"\n",
" ############################\n",
" # (2) Update G network: maximize log(D(G(z)))\n",
" ###########################\n",
" model_G.zero_grad()\n",
" label.fill_(1)\n",
" output, outlabels = model_D(fake)\n",
"\n",
" errG = loss_function(output, label) + F.nll_loss(outlabels, sample_labels)\n",
" errG.backward()\n",
" D_G_z2 = output.mean().item()\n",
" train_loss_G += errG.item()\n",
" optimizerG.step()\n",
"\n",
" print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f'\n",
" % (epoch+1, num_epochs, i+1, len(loader_train),\n",
" errD.item(), errG.item(), D_x, D_G_z1, D_G_z2))\n",
"\n",
" schedD.step()\n",
" schedG.step()\n",
" if epoch == 0:\n",
" save_image(denorm(real_cpu.cpu()).float(), '/content/drive/MyDrive/icl_dl_cw2/GAN2/real_samples.png')\n",
" with torch.no_grad():\n",
" fake = model_G(fixed_noise, fixed_labels)\n",
" save_image(denorm(fake.cpu()).float(), '/content/drive/MyDrive/icl_dl_cw2/GAN2/fake_samples_epoch_%03d.png' % epoch)\n",
" train_losses_D.append(train_loss_D / len(loader_train))\n",
" train_losses_G.append(train_loss_G / len(loader_train))"
],
"execution_count": 51,
"outputs": [
{
"output_type": "stream",
"text": [
"\u001b[1;30;43m流式输出内容被截断只能显示最后 5000 行内容。\u001b[0m\n",
"[88/100][84/391] Loss_D: 3.5527 Loss_G: 3.0607 D(x): 0.5631 D(G(z)): 0.5151 / 0.3833\n",
"[88/100][85/391] Loss_D: 2.7218 Loss_G: 2.4398 D(x): 0.7468 D(G(z)): 0.4465 / 0.4565\n",
"[88/100][86/391] Loss_D: 3.3780 Loss_G: 2.6007 D(x): 0.6515 D(G(z)): 0.5268 / 0.4394\n",
"[88/100][87/391] Loss_D: 3.3666 Loss_G: 3.6614 D(x): 0.6080 D(G(z)): 0.4639 / 0.3211\n",
"[88/100][88/391] Loss_D: 3.1745 Loss_G: 3.7921 D(x): 0.5723 D(G(z)): 0.4475 / 0.3165\n",
"[88/100][89/391] Loss_D: 2.7192 Loss_G: 2.3323 D(x): 0.6846 D(G(z)): 0.4141 / 0.4812\n",
"[88/100][90/391] Loss_D: 3.1284 Loss_G: 1.8367 D(x): 0.6773 D(G(z)): 0.5080 / 0.5611\n",
"[88/100][91/391] Loss_D: 3.4666 Loss_G: 2.4045 D(x): 0.6386 D(G(z)): 0.4810 / 0.5057\n",
"[88/100][92/391] Loss_D: 2.6873 Loss_G: 2.5932 D(x): 0.7031 D(G(z)): 0.4250 / 0.4516\n",
"[88/100][93/391] Loss_D: 3.2966 Loss_G: 3.1624 D(x): 0.6746 D(G(z)): 0.5215 / 0.3739\n",
"[88/100][94/391] Loss_D: 2.6231 Loss_G: 3.1163 D(x): 0.6292 D(G(z)): 0.3555 / 0.3754\n",
"[88/100][95/391] Loss_D: 2.8173 Loss_G: 2.9865 D(x): 0.6864 D(G(z)): 0.4105 / 0.3959\n",
"[88/100][96/391] Loss_D: 2.8343 Loss_G: 2.8137 D(x): 0.6016 D(G(z)): 0.3192 / 0.4180\n",
"[88/100][97/391] Loss_D: 3.2312 Loss_G: 3.0118 D(x): 0.6646 D(G(z)): 0.4846 / 0.3979\n",
"[88/100][98/391] Loss_D: 3.2750 Loss_G: 3.8746 D(x): 0.5898 D(G(z)): 0.4793 / 0.2909\n",
"[88/100][99/391] Loss_D: 2.6684 Loss_G: 2.6387 D(x): 0.6640 D(G(z)): 0.3467 / 0.4464\n",
"[88/100][100/391] Loss_D: 3.0999 Loss_G: 2.3298 D(x): 0.5729 D(G(z)): 0.4041 / 0.4809\n",
"[88/100][101/391] Loss_D: 3.5016 Loss_G: 2.2815 D(x): 0.6777 D(G(z)): 0.5642 / 0.5073\n",
"[88/100][102/391] Loss_D: 2.9385 Loss_G: 2.7786 D(x): 0.6895 D(G(z)): 0.4379 / 0.4212\n",
"[88/100][103/391] Loss_D: 2.8947 Loss_G: 2.1295 D(x): 0.6183 D(G(z)): 0.4014 / 0.5127\n",
"[88/100][104/391] Loss_D: 3.3214 Loss_G: 2.7851 D(x): 0.6135 D(G(z)): 0.5039 / 0.4189\n",
"[88/100][105/391] Loss_D: 2.7190 Loss_G: 2.8076 D(x): 0.6852 D(G(z)): 0.3674 / 0.4119\n",
"[88/100][106/391] Loss_D: 2.5153 Loss_G: 2.6725 D(x): 0.7196 D(G(z)): 0.3984 / 0.4225\n",
"[88/100][107/391] Loss_D: 3.7164 Loss_G: 2.5476 D(x): 0.5260 D(G(z)): 0.4774 / 0.4489\n",
"[88/100][108/391] Loss_D: 3.0308 Loss_G: 3.0837 D(x): 0.6343 D(G(z)): 0.4478 / 0.3638\n",
"[88/100][109/391] Loss_D: 3.4944 Loss_G: 1.8024 D(x): 0.5745 D(G(z)): 0.5108 / 0.5883\n",
"[88/100][110/391] Loss_D: 3.0668 Loss_G: 2.5326 D(x): 0.7147 D(G(z)): 0.5295 / 0.4618\n",
"[88/100][111/391] Loss_D: 2.7107 Loss_G: 2.2804 D(x): 0.6808 D(G(z)): 0.3873 / 0.4851\n",
"[88/100][112/391] Loss_D: 2.9741 Loss_G: 2.8283 D(x): 0.6093 D(G(z)): 0.4248 / 0.4144\n",
"[88/100][113/391] Loss_D: 3.0972 Loss_G: 2.7351 D(x): 0.6109 D(G(z)): 0.3998 / 0.4192\n",
"[88/100][114/391] Loss_D: 2.9475 Loss_G: 2.3050 D(x): 0.6462 D(G(z)): 0.4812 / 0.4806\n",
"[88/100][115/391] Loss_D: 3.0661 Loss_G: 2.6235 D(x): 0.6497 D(G(z)): 0.4459 / 0.4389\n",
"[88/100][116/391] Loss_D: 3.4674 Loss_G: 2.0054 D(x): 0.5807 D(G(z)): 0.5184 / 0.5116\n",
"[88/100][117/391] Loss_D: 2.9985 Loss_G: 2.0659 D(x): 0.6966 D(G(z)): 0.4530 / 0.5278\n",
"[88/100][118/391] Loss_D: 2.5373 Loss_G: 2.8538 D(x): 0.7624 D(G(z)): 0.4887 / 0.4154\n",
"[88/100][119/391] Loss_D: 2.5330 Loss_G: 2.8244 D(x): 0.7768 D(G(z)): 0.4312 / 0.4134\n",
"[88/100][120/391] Loss_D: 2.5793 Loss_G: 3.1359 D(x): 0.7209 D(G(z)): 0.4174 / 0.4027\n",
"[88/100][121/391] Loss_D: 3.6358 Loss_G: 2.6066 D(x): 0.5909 D(G(z)): 0.5135 / 0.4603\n",
"[88/100][122/391] Loss_D: 2.5295 Loss_G: 3.5637 D(x): 0.7227 D(G(z)): 0.3735 / 0.3334\n",
"[88/100][123/391] Loss_D: 2.9649 Loss_G: 2.5778 D(x): 0.6157 D(G(z)): 0.3708 / 0.4475\n",
"[88/100][124/391] Loss_D: 2.7467 Loss_G: 2.9711 D(x): 0.6282 D(G(z)): 0.3563 / 0.3901\n",
"[88/100][125/391] Loss_D: 3.9871 Loss_G: 3.1296 D(x): 0.5333 D(G(z)): 0.5364 / 0.3752\n",
"[88/100][126/391] Loss_D: 2.9049 Loss_G: 2.6806 D(x): 0.5907 D(G(z)): 0.3860 / 0.4340\n",
"[88/100][127/391] Loss_D: 2.6803 Loss_G: 3.0630 D(x): 0.7241 D(G(z)): 0.3747 / 0.3902\n",
"[88/100][128/391] Loss_D: 2.1101 Loss_G: 2.9855 D(x): 0.7424 D(G(z)): 0.3602 / 0.3992\n",
"[88/100][129/391] Loss_D: 2.6992 Loss_G: 3.0225 D(x): 0.6837 D(G(z)): 0.4243 / 0.4017\n",
"[88/100][130/391] Loss_D: 3.2083 Loss_G: 2.2376 D(x): 0.6321 D(G(z)): 0.4608 / 0.5094\n",
"[88/100][131/391] Loss_D: 2.9858 Loss_G: 2.4283 D(x): 0.6676 D(G(z)): 0.4555 / 0.4697\n",
"[88/100][132/391] Loss_D: 2.9828 Loss_G: 2.4314 D(x): 0.6254 D(G(z)): 0.4383 / 0.4638\n",
"[88/100][133/391] Loss_D: 2.9985 Loss_G: 2.0369 D(x): 0.6404 D(G(z)): 0.4886 / 0.5347\n",
"[88/100][134/391] Loss_D: 3.0950 Loss_G: 3.2619 D(x): 0.5742 D(G(z)): 0.4239 / 0.3598\n",
"[88/100][135/391] Loss_D: 2.9127 Loss_G: 2.3863 D(x): 0.6312 D(G(z)): 0.4253 / 0.4635\n",
"[88/100][136/391] Loss_D: 3.3264 Loss_G: 3.1541 D(x): 0.6807 D(G(z)): 0.5423 / 0.3844\n",
"[88/100][137/391] Loss_D: 3.0097 Loss_G: 2.3201 D(x): 0.5787 D(G(z)): 0.3584 / 0.4830\n",
"[88/100][138/391] Loss_D: 2.7457 Loss_G: 2.5216 D(x): 0.6953 D(G(z)): 0.4941 / 0.4345\n",
"[88/100][139/391] Loss_D: 2.9406 Loss_G: 2.3891 D(x): 0.6435 D(G(z)): 0.3906 / 0.4788\n",
"[88/100][140/391] Loss_D: 3.3459 Loss_G: 2.6409 D(x): 0.6044 D(G(z)): 0.4979 / 0.4294\n",
"[88/100][141/391] Loss_D: 2.9299 Loss_G: 2.2932 D(x): 0.6834 D(G(z)): 0.4213 / 0.4968\n",
"[88/100][142/391] Loss_D: 2.9993 Loss_G: 2.6459 D(x): 0.7269 D(G(z)): 0.5212 / 0.4411\n",
"[88/100][143/391] Loss_D: 2.8873 Loss_G: 2.0892 D(x): 0.6250 D(G(z)): 0.3851 / 0.5145\n",
"[88/100][144/391] Loss_D: 3.1384 Loss_G: 2.5540 D(x): 0.6716 D(G(z)): 0.5284 / 0.4523\n",
"[88/100][145/391] Loss_D: 3.0436 Loss_G: 3.1029 D(x): 0.6661 D(G(z)): 0.4326 / 0.3794\n",
"[88/100][146/391] Loss_D: 2.9119 Loss_G: 3.1975 D(x): 0.6420 D(G(z)): 0.3796 / 0.3550\n",
"[88/100][147/391] Loss_D: 2.6076 Loss_G: 3.3534 D(x): 0.7791 D(G(z)): 0.4181 / 0.3529\n",
"[88/100][148/391] Loss_D: 3.4869 Loss_G: 2.8858 D(x): 0.5293 D(G(z)): 0.4629 / 0.4122\n",
"[88/100][149/391] Loss_D: 2.8906 Loss_G: 2.8080 D(x): 0.6901 D(G(z)): 0.4507 / 0.4270\n",
"[88/100][150/391] Loss_D: 3.3469 Loss_G: 2.6155 D(x): 0.7072 D(G(z)): 0.5716 / 0.4594\n",
"[88/100][151/391] Loss_D: 3.6237 Loss_G: 2.0681 D(x): 0.6283 D(G(z)): 0.4297 / 0.5413\n",
"[88/100][152/391] Loss_D: 3.3564 Loss_G: 2.9075 D(x): 0.5838 D(G(z)): 0.5105 / 0.4096\n",
"[88/100][153/391] Loss_D: 3.2188 Loss_G: 2.5257 D(x): 0.6029 D(G(z)): 0.4431 / 0.4849\n",
"[88/100][154/391] Loss_D: 3.2626 Loss_G: 2.3240 D(x): 0.5400 D(G(z)): 0.4171 / 0.4978\n",
"[88/100][155/391] Loss_D: 3.3651 Loss_G: 2.5369 D(x): 0.5975 D(G(z)): 0.4159 / 0.4516\n",
"[88/100][156/391] Loss_D: 2.9869 Loss_G: 2.5590 D(x): 0.5943 D(G(z)): 0.3744 / 0.4570\n",
"[88/100][157/391] Loss_D: 3.6233 Loss_G: 2.6560 D(x): 0.5924 D(G(z)): 0.5151 / 0.4157\n",
"[88/100][158/391] Loss_D: 3.2094 Loss_G: 3.0221 D(x): 0.5709 D(G(z)): 0.4124 / 0.3849\n",
"[88/100][159/391] Loss_D: 2.5474 Loss_G: 1.9693 D(x): 0.7565 D(G(z)): 0.4260 / 0.5427\n",
"[88/100][160/391] Loss_D: 2.8850 Loss_G: 1.9493 D(x): 0.7150 D(G(z)): 0.4900 / 0.5493\n",
"[88/100][161/391] Loss_D: 2.8603 Loss_G: 2.4506 D(x): 0.6760 D(G(z)): 0.4153 / 0.4633\n",
"[88/100][162/391] Loss_D: 2.9063 Loss_G: 2.6111 D(x): 0.6354 D(G(z)): 0.4450 / 0.4393\n",
"[88/100][163/391] Loss_D: 2.8634 Loss_G: 2.2318 D(x): 0.6529 D(G(z)): 0.4519 / 0.4950\n",
"[88/100][164/391] Loss_D: 2.4432 Loss_G: 3.1943 D(x): 0.6750 D(G(z)): 0.3500 / 0.3824\n",
"[88/100][165/391] Loss_D: 2.8595 Loss_G: 2.7609 D(x): 0.6209 D(G(z)): 0.3987 / 0.4203\n",
"[88/100][166/391] Loss_D: 3.0905 Loss_G: 2.5281 D(x): 0.6408 D(G(z)): 0.4711 / 0.4541\n",
"[88/100][167/391] Loss_D: 2.8504 Loss_G: 3.2215 D(x): 0.7013 D(G(z)): 0.4000 / 0.3565\n",
"[88/100][168/391] Loss_D: 3.0777 Loss_G: 2.6194 D(x): 0.6052 D(G(z)): 0.3675 / 0.4549\n",
"[88/100][169/391] Loss_D: 3.3837 Loss_G: 2.7942 D(x): 0.6584 D(G(z)): 0.5734 / 0.4198\n",
"[88/100][170/391] Loss_D: 3.4212 Loss_G: 2.2132 D(x): 0.7510 D(G(z)): 0.6086 / 0.4981\n",
"[88/100][171/391] Loss_D: 3.4010 Loss_G: 2.7737 D(x): 0.6304 D(G(z)): 0.5241 / 0.4312\n",
"[88/100][172/391] Loss_D: 3.1207 Loss_G: 2.3737 D(x): 0.7349 D(G(z)): 0.5519 / 0.4951\n",
"[88/100][173/391] Loss_D: 2.9022 Loss_G: 2.3895 D(x): 0.5921 D(G(z)): 0.3228 / 0.4778\n",
"[88/100][174/391] Loss_D: 3.0972 Loss_G: 3.0734 D(x): 0.5954 D(G(z)): 0.4562 / 0.3916\n",
"[88/100][175/391] Loss_D: 3.1669 Loss_G: 2.4423 D(x): 0.6566 D(G(z)): 0.4919 / 0.4834\n",
"[88/100][176/391] Loss_D: 2.9655 Loss_G: 2.6714 D(x): 0.6033 D(G(z)): 0.3264 / 0.4228\n",
"[88/100][177/391] Loss_D: 3.1158 Loss_G: 3.5796 D(x): 0.6329 D(G(z)): 0.4685 / 0.3313\n",
"[88/100][178/391] Loss_D: 3.1749 Loss_G: 2.3403 D(x): 0.5950 D(G(z)): 0.4803 / 0.4868\n",
"[88/100][179/391] Loss_D: 2.7621 Loss_G: 2.0422 D(x): 0.6949 D(G(z)): 0.4193 / 0.5401\n",
"[88/100][180/391] Loss_D: 2.6892 Loss_G: 2.0212 D(x): 0.7633 D(G(z)): 0.4499 / 0.5444\n",
"[88/100][181/391] Loss_D: 3.5513 Loss_G: 2.9390 D(x): 0.6753 D(G(z)): 0.3618 / 0.3939\n",
"[88/100][182/391] Loss_D: 2.6900 Loss_G: 2.7709 D(x): 0.6468 D(G(z)): 0.3897 / 0.4185\n",
"[88/100][183/391] Loss_D: 3.0993 Loss_G: 2.4856 D(x): 0.6721 D(G(z)): 0.5244 / 0.4598\n",
"[88/100][184/391] Loss_D: 3.0283 Loss_G: 3.7996 D(x): 0.6233 D(G(z)): 0.4073 / 0.3043\n",
"[88/100][185/391] Loss_D: 2.9653 Loss_G: 3.8930 D(x): 0.6295 D(G(z)): 0.4154 / 0.2912\n",
"[88/100][186/391] Loss_D: 2.9971 Loss_G: 3.3011 D(x): 0.6769 D(G(z)): 0.4847 / 0.3534\n",
"[88/100][187/391] Loss_D: 2.8881 Loss_G: 2.1916 D(x): 0.6706 D(G(z)): 0.4350 / 0.5022\n",
"[88/100][188/391] Loss_D: 3.5642 Loss_G: 3.1417 D(x): 0.5921 D(G(z)): 0.5603 / 0.3793\n",
"[88/100][189/391] Loss_D: 2.8689 Loss_G: 2.5676 D(x): 0.6253 D(G(z)): 0.3776 / 0.4517\n",
"[88/100][190/391] Loss_D: 3.1398 Loss_G: 2.4429 D(x): 0.6513 D(G(z)): 0.4920 / 0.4677\n",
"[88/100][191/391] Loss_D: 3.0719 Loss_G: 2.9148 D(x): 0.6741 D(G(z)): 0.4576 / 0.3943\n",
"[88/100][192/391] Loss_D: 3.1609 Loss_G: 2.7979 D(x): 0.5672 D(G(z)): 0.4383 / 0.4284\n",
"[88/100][193/391] Loss_D: 2.5416 Loss_G: 2.6886 D(x): 0.6834 D(G(z)): 0.3997 / 0.4304\n",
"[88/100][194/391] Loss_D: 3.2965 Loss_G: 2.7708 D(x): 0.5460 D(G(z)): 0.3902 / 0.4202\n",
"[88/100][195/391] Loss_D: 2.8057 Loss_G: 2.5197 D(x): 0.6783 D(G(z)): 0.4307 / 0.4422\n",
"[88/100][196/391] Loss_D: 3.4339 Loss_G: 1.7968 D(x): 0.5607 D(G(z)): 0.5345 / 0.5676\n",
"[88/100][197/391] Loss_D: 2.9132 Loss_G: 2.2329 D(x): 0.7043 D(G(z)): 0.4528 / 0.4851\n",
"[88/100][198/391] Loss_D: 3.2224 Loss_G: 2.0588 D(x): 0.5595 D(G(z)): 0.3738 / 0.5161\n",
"[88/100][199/391] Loss_D: 2.9884 Loss_G: 1.7462 D(x): 0.6401 D(G(z)): 0.4601 / 0.5863\n",
"[88/100][200/391] Loss_D: 2.6613 Loss_G: 2.9026 D(x): 0.7359 D(G(z)): 0.4535 / 0.4053\n",
"[88/100][201/391] Loss_D: 3.5694 Loss_G: 2.0888 D(x): 0.6946 D(G(z)): 0.5782 / 0.5185\n",
"[88/100][202/391] Loss_D: 2.6668 Loss_G: 2.2803 D(x): 0.6994 D(G(z)): 0.4396 / 0.5011\n",
"[88/100][203/391] Loss_D: 2.5929 Loss_G: 3.6629 D(x): 0.7258 D(G(z)): 0.3937 / 0.3055\n",
"[88/100][204/391] Loss_D: 2.2985 Loss_G: 2.2967 D(x): 0.7218 D(G(z)): 0.4068 / 0.4989\n",
"[88/100][205/391] Loss_D: 2.5001 Loss_G: 2.4790 D(x): 0.6973 D(G(z)): 0.3707 / 0.4597\n",
"[88/100][206/391] Loss_D: 3.0130 Loss_G: 2.9073 D(x): 0.6384 D(G(z)): 0.4638 / 0.4048\n",
"[88/100][207/391] Loss_D: 3.0214 Loss_G: 3.2025 D(x): 0.6128 D(G(z)): 0.4102 / 0.3730\n",
"[88/100][208/391] Loss_D: 2.8561 Loss_G: 3.1630 D(x): 0.5894 D(G(z)): 0.3837 / 0.3819\n",
"[88/100][209/391] Loss_D: 3.3133 Loss_G: 1.9995 D(x): 0.5505 D(G(z)): 0.4399 / 0.5485\n",
"[88/100][210/391] Loss_D: 2.7221 Loss_G: 1.9917 D(x): 0.6524 D(G(z)): 0.3703 / 0.5420\n",
"[88/100][211/391] Loss_D: 3.7302 Loss_G: 1.5868 D(x): 0.6125 D(G(z)): 0.3392 / 0.5986\n",
"[88/100][212/391] Loss_D: 2.9443 Loss_G: 2.1335 D(x): 0.7070 D(G(z)): 0.4812 / 0.5046\n",
"[88/100][213/391] Loss_D: 3.0689 Loss_G: 2.4270 D(x): 0.7104 D(G(z)): 0.5245 / 0.4698\n",
"[88/100][214/391] Loss_D: 2.3532 Loss_G: 2.3282 D(x): 0.6971 D(G(z)): 0.3617 / 0.4804\n",
"[88/100][215/391] Loss_D: 2.9081 Loss_G: 3.9604 D(x): 0.5867 D(G(z)): 0.3461 / 0.2825\n",
"[88/100][216/391] Loss_D: 2.6740 Loss_G: 2.3688 D(x): 0.7228 D(G(z)): 0.4306 / 0.4740\n",
"[88/100][217/391] Loss_D: 3.1548 Loss_G: 2.6026 D(x): 0.6000 D(G(z)): 0.4441 / 0.4445\n",
"[88/100][218/391] Loss_D: 2.5768 Loss_G: 2.3307 D(x): 0.7519 D(G(z)): 0.4850 / 0.4845\n",
"[88/100][219/391] Loss_D: 2.7823 Loss_G: 2.5495 D(x): 0.6992 D(G(z)): 0.4435 / 0.4582\n",
"[88/100][220/391] Loss_D: 3.2040 Loss_G: 2.3595 D(x): 0.6858 D(G(z)): 0.5346 / 0.4783\n",
"[88/100][221/391] Loss_D: 3.1674 Loss_G: 3.4162 D(x): 0.6640 D(G(z)): 0.5162 / 0.3458\n",
"[88/100][222/391] Loss_D: 3.0514 Loss_G: 2.6536 D(x): 0.6188 D(G(z)): 0.4578 / 0.4384\n",
"[88/100][223/391] Loss_D: 2.5609 Loss_G: 2.4868 D(x): 0.7333 D(G(z)): 0.3781 / 0.4546\n",
"[88/100][224/391] Loss_D: 2.9630 Loss_G: 3.4387 D(x): 0.6135 D(G(z)): 0.4293 / 0.3490\n",
"[88/100][225/391] Loss_D: 3.2990 Loss_G: 2.9620 D(x): 0.5579 D(G(z)): 0.4018 / 0.3894\n",
"[88/100][226/391] Loss_D: 2.7806 Loss_G: 2.4489 D(x): 0.6528 D(G(z)): 0.4341 / 0.4561\n",
"[88/100][227/391] Loss_D: 3.0047 Loss_G: 2.8624 D(x): 0.6254 D(G(z)): 0.4447 / 0.3970\n",
"[88/100][228/391] Loss_D: 3.5874 Loss_G: 2.2119 D(x): 0.5539 D(G(z)): 0.5153 / 0.5133\n",
"[88/100][229/391] Loss_D: 2.8490 Loss_G: 2.5416 D(x): 0.6792 D(G(z)): 0.4477 / 0.4614\n",
"[88/100][230/391] Loss_D: 2.6575 Loss_G: 3.3070 D(x): 0.7165 D(G(z)): 0.4694 / 0.3651\n",
"[88/100][231/391] Loss_D: 2.7332 Loss_G: 1.9146 D(x): 0.7031 D(G(z)): 0.4316 / 0.5517\n",
"[88/100][232/391] Loss_D: 2.8604 Loss_G: 3.5083 D(x): 0.6587 D(G(z)): 0.4135 / 0.3366\n",
"[88/100][233/391] Loss_D: 2.6995 Loss_G: 3.1690 D(x): 0.6532 D(G(z)): 0.3624 / 0.3781\n",
"[88/100][234/391] Loss_D: 3.1279 Loss_G: 2.7503 D(x): 0.6103 D(G(z)): 0.4775 / 0.4208\n",
"[88/100][235/391] Loss_D: 2.7979 Loss_G: 2.7326 D(x): 0.6188 D(G(z)): 0.4090 / 0.4249\n",
"[88/100][236/391] Loss_D: 2.8753 Loss_G: 3.1815 D(x): 0.7083 D(G(z)): 0.4404 / 0.3712\n",
"[88/100][237/391] Loss_D: 2.9215 Loss_G: 2.5077 D(x): 0.6757 D(G(z)): 0.4608 / 0.4498\n",
"[88/100][238/391] Loss_D: 3.1250 Loss_G: 2.7361 D(x): 0.6431 D(G(z)): 0.5171 / 0.4302\n",
"[88/100][239/391] Loss_D: 2.7180 Loss_G: 2.3622 D(x): 0.7313 D(G(z)): 0.4478 / 0.4880\n",
"[88/100][240/391] Loss_D: 3.8743 Loss_G: 2.1819 D(x): 0.4673 D(G(z)): 0.4276 / 0.5056\n",
"[88/100][241/391] Loss_D: 3.7894 Loss_G: 2.4681 D(x): 0.6048 D(G(z)): 0.3364 / 0.4884\n",
"[88/100][242/391] Loss_D: 3.0736 Loss_G: 2.3449 D(x): 0.7350 D(G(z)): 0.5457 / 0.4800\n",
"[88/100][243/391] Loss_D: 2.9383 Loss_G: 2.6455 D(x): 0.6188 D(G(z)): 0.4258 / 0.4292\n",
"[88/100][244/391] Loss_D: 3.1865 Loss_G: 2.3503 D(x): 0.6634 D(G(z)): 0.5183 / 0.4692\n",
"[88/100][245/391] Loss_D: 3.2795 Loss_G: 2.8987 D(x): 0.6504 D(G(z)): 0.5160 / 0.4007\n",
"[88/100][246/391] Loss_D: 2.7898 Loss_G: 3.2558 D(x): 0.6286 D(G(z)): 0.3659 / 0.3739\n",
"[88/100][247/391] Loss_D: 3.2198 Loss_G: 2.5193 D(x): 0.5734 D(G(z)): 0.4390 / 0.4339\n",
"[88/100][248/391] Loss_D: 2.7722 Loss_G: 2.0651 D(x): 0.6738 D(G(z)): 0.4214 / 0.5320\n",
"[88/100][249/391] Loss_D: 3.1523 Loss_G: 2.0997 D(x): 0.6673 D(G(z)): 0.5137 / 0.5307\n",
"[88/100][250/391] Loss_D: 2.6082 Loss_G: 1.9338 D(x): 0.6927 D(G(z)): 0.3707 / 0.5637\n",
"[88/100][251/391] Loss_D: 3.3811 Loss_G: 3.0467 D(x): 0.6786 D(G(z)): 0.5444 / 0.3902\n",
"[88/100][252/391] Loss_D: 4.1063 Loss_G: 2.7371 D(x): 0.6525 D(G(z)): 0.6332 / 0.4391\n",
"[88/100][253/391] Loss_D: 3.0761 Loss_G: 2.7582 D(x): 0.6637 D(G(z)): 0.4782 / 0.4376\n",
"[88/100][254/391] Loss_D: 2.6116 Loss_G: 2.6435 D(x): 0.5874 D(G(z)): 0.2906 / 0.4436\n",
"[88/100][255/391] Loss_D: 3.3151 Loss_G: 3.3666 D(x): 0.5696 D(G(z)): 0.4353 / 0.3606\n",
"[88/100][256/391] Loss_D: 3.3240 Loss_G: 2.2230 D(x): 0.5679 D(G(z)): 0.4175 / 0.5102\n",
"[88/100][257/391] Loss_D: 3.8344 Loss_G: 2.6252 D(x): 0.5367 D(G(z)): 0.4923 / 0.4475\n",
"[88/100][258/391] Loss_D: 2.6911 Loss_G: 2.2461 D(x): 0.7039 D(G(z)): 0.4623 / 0.5015\n",
"[88/100][259/391] Loss_D: 3.0443 Loss_G: 2.3660 D(x): 0.6447 D(G(z)): 0.4611 / 0.4905\n",
"[88/100][260/391] Loss_D: 2.5456 Loss_G: 2.3960 D(x): 0.7720 D(G(z)): 0.4460 / 0.4722\n",
"[88/100][261/391] Loss_D: 3.1408 Loss_G: 2.7714 D(x): 0.6776 D(G(z)): 0.4863 / 0.4291\n",
"[88/100][262/391] Loss_D: 3.5089 Loss_G: 2.6829 D(x): 0.5609 D(G(z)): 0.4530 / 0.4474\n",
"[88/100][263/391] Loss_D: 2.7188 Loss_G: 2.4919 D(x): 0.7336 D(G(z)): 0.3766 / 0.4596\n",
"[88/100][264/391] Loss_D: 3.3523 Loss_G: 2.6088 D(x): 0.5487 D(G(z)): 0.4727 / 0.4491\n",
"[88/100][265/391] Loss_D: 2.5904 Loss_G: 2.0232 D(x): 0.6694 D(G(z)): 0.3419 / 0.5203\n",
"[88/100][266/391] Loss_D: 3.3414 Loss_G: 2.5322 D(x): 0.6520 D(G(z)): 0.4917 / 0.4479\n",
"[88/100][267/391] Loss_D: 2.8486 Loss_G: 2.7717 D(x): 0.6597 D(G(z)): 0.4274 / 0.4075\n",
"[88/100][268/391] Loss_D: 2.5775 Loss_G: 2.1413 D(x): 0.7072 D(G(z)): 0.4586 / 0.5246\n",
"[88/100][269/391] Loss_D: 3.3472 Loss_G: 3.2413 D(x): 0.6355 D(G(z)): 0.5221 / 0.3675\n",
"[88/100][270/391] Loss_D: 2.6755 Loss_G: 2.3990 D(x): 0.6719 D(G(z)): 0.3781 / 0.4537\n",
"[88/100][271/391] Loss_D: 3.7365 Loss_G: 2.9589 D(x): 0.5783 D(G(z)): 0.3598 / 0.4098\n",
"[88/100][272/391] Loss_D: 2.8261 Loss_G: 2.2654 D(x): 0.6621 D(G(z)): 0.4179 / 0.5056\n",
"[88/100][273/391] Loss_D: 3.2519 Loss_G: 2.5939 D(x): 0.6519 D(G(z)): 0.5091 / 0.4397\n",
"[88/100][274/391] Loss_D: 3.8922 Loss_G: 3.1431 D(x): 0.5748 D(G(z)): 0.5472 / 0.3807\n",
"[88/100][275/391] Loss_D: 2.9898 Loss_G: 2.7211 D(x): 0.5940 D(G(z)): 0.3807 / 0.4325\n",
"[88/100][276/391] Loss_D: 3.0105 Loss_G: 2.8220 D(x): 0.6584 D(G(z)): 0.4278 / 0.4156\n",
"[88/100][277/391] Loss_D: 3.0019 Loss_G: 2.3182 D(x): 0.7179 D(G(z)): 0.4835 / 0.4792\n",
"[88/100][278/391] Loss_D: 3.0694 Loss_G: 2.1298 D(x): 0.6044 D(G(z)): 0.4744 / 0.5241\n",
"[88/100][279/391] Loss_D: 3.4288 Loss_G: 2.8319 D(x): 0.5954 D(G(z)): 0.4963 / 0.4132\n",
"[88/100][280/391] Loss_D: 2.7519 Loss_G: 2.6489 D(x): 0.7100 D(G(z)): 0.4237 / 0.4440\n",
"[88/100][281/391] Loss_D: 3.1406 Loss_G: 2.8647 D(x): 0.7101 D(G(z)): 0.5138 / 0.4147\n",
"[88/100][282/391] Loss_D: 3.5904 Loss_G: 3.4424 D(x): 0.5908 D(G(z)): 0.5067 / 0.3591\n",
"[88/100][283/391] Loss_D: 3.8643 Loss_G: 2.8543 D(x): 0.4741 D(G(z)): 0.4062 / 0.3993\n",
"[88/100][284/391] Loss_D: 3.7360 Loss_G: 2.6616 D(x): 0.5899 D(G(z)): 0.5652 / 0.4356\n",
"[88/100][285/391] Loss_D: 3.1921 Loss_G: 2.3206 D(x): 0.5993 D(G(z)): 0.4433 / 0.5066\n",
"[88/100][286/391] Loss_D: 2.8682 Loss_G: 3.6479 D(x): 0.6543 D(G(z)): 0.4201 / 0.3214\n",
"[88/100][287/391] Loss_D: 3.2564 Loss_G: 2.8943 D(x): 0.5977 D(G(z)): 0.4356 / 0.4031\n",
"[88/100][288/391] Loss_D: 2.8728 Loss_G: 2.5555 D(x): 0.7071 D(G(z)): 0.5256 / 0.4663\n",
"[88/100][289/391] Loss_D: 3.1925 Loss_G: 3.4295 D(x): 0.6217 D(G(z)): 0.4828 / 0.3521\n",
"[88/100][290/391] Loss_D: 3.1837 Loss_G: 3.8836 D(x): 0.6674 D(G(z)): 0.5171 / 0.3032\n",
"[88/100][291/391] Loss_D: 2.6178 Loss_G: 2.1918 D(x): 0.6786 D(G(z)): 0.3549 / 0.5078\n",
"[88/100][292/391] Loss_D: 3.0553 Loss_G: 2.4097 D(x): 0.6341 D(G(z)): 0.4209 / 0.4790\n",
"[88/100][293/391] Loss_D: 2.8360 Loss_G: 3.0772 D(x): 0.6311 D(G(z)): 0.3750 / 0.3898\n",
"[88/100][294/391] Loss_D: 2.7480 Loss_G: 2.2272 D(x): 0.6447 D(G(z)): 0.4272 / 0.5135\n",
"[88/100][295/391] Loss_D: 3.2269 Loss_G: 2.1911 D(x): 0.6216 D(G(z)): 0.4716 / 0.5027\n",
"[88/100][296/391] Loss_D: 3.5130 Loss_G: 2.4463 D(x): 0.5998 D(G(z)): 0.5412 / 0.4642\n",
"[88/100][297/391] Loss_D: 2.8168 Loss_G: 2.9135 D(x): 0.7364 D(G(z)): 0.4502 / 0.4072\n",
"[88/100][298/391] Loss_D: 3.6499 Loss_G: 2.1789 D(x): 0.6322 D(G(z)): 0.5854 / 0.5049\n",
"[88/100][299/391] Loss_D: 2.7609 Loss_G: 3.0448 D(x): 0.6873 D(G(z)): 0.4384 / 0.3901\n",
"[88/100][300/391] Loss_D: 2.9301 Loss_G: 2.1461 D(x): 0.6377 D(G(z)): 0.4011 / 0.5137\n",
"[88/100][301/391] Loss_D: 3.6536 Loss_G: 2.7420 D(x): 0.6072 D(G(z)): 0.4131 / 0.4292\n",
"[88/100][302/391] Loss_D: 2.8973 Loss_G: 2.4944 D(x): 0.6640 D(G(z)): 0.4433 / 0.4624\n",
"[88/100][303/391] Loss_D: 3.0437 Loss_G: 2.4703 D(x): 0.6591 D(G(z)): 0.4859 / 0.4598\n",
"[88/100][304/391] Loss_D: 2.3098 Loss_G: 2.4906 D(x): 0.7148 D(G(z)): 0.4270 / 0.4735\n",
"[88/100][305/391] Loss_D: 3.4507 Loss_G: 3.1717 D(x): 0.5468 D(G(z)): 0.4471 / 0.3854\n",
"[88/100][306/391] Loss_D: 2.9949 Loss_G: 2.3187 D(x): 0.6708 D(G(z)): 0.4384 / 0.4842\n",
"[88/100][307/391] Loss_D: 3.0084 Loss_G: 2.5033 D(x): 0.6031 D(G(z)): 0.4075 / 0.4516\n",
"[88/100][308/391] Loss_D: 2.3938 Loss_G: 3.2590 D(x): 0.6874 D(G(z)): 0.2940 / 0.3607\n",
"[88/100][309/391] Loss_D: 3.4616 Loss_G: 2.3992 D(x): 0.5862 D(G(z)): 0.5264 / 0.4886\n",
"[88/100][310/391] Loss_D: 3.0577 Loss_G: 2.7764 D(x): 0.7398 D(G(z)): 0.5246 / 0.4428\n",
"[88/100][311/391] Loss_D: 2.6113 Loss_G: 2.7517 D(x): 0.7249 D(G(z)): 0.3662 / 0.4258\n",
"[88/100][312/391] Loss_D: 2.5169 Loss_G: 2.9207 D(x): 0.7039 D(G(z)): 0.3660 / 0.3959\n",
"[88/100][313/391] Loss_D: 3.3550 Loss_G: 4.0037 D(x): 0.5959 D(G(z)): 0.5317 / 0.2909\n",
"[88/100][314/391] Loss_D: 2.1147 Loss_G: 3.3272 D(x): 0.7205 D(G(z)): 0.3613 / 0.3602\n",
"[88/100][315/391] Loss_D: 2.8591 Loss_G: 3.1776 D(x): 0.6860 D(G(z)): 0.4204 / 0.3883\n",
"[88/100][316/391] Loss_D: 2.6213 Loss_G: 3.9345 D(x): 0.7063 D(G(z)): 0.3792 / 0.2912\n",
"[88/100][317/391] Loss_D: 3.1167 Loss_G: 2.9970 D(x): 0.6689 D(G(z)): 0.5034 / 0.3979\n",
"[88/100][318/391] Loss_D: 2.4384 Loss_G: 3.2169 D(x): 0.6957 D(G(z)): 0.4054 / 0.3810\n",
"[88/100][319/391] Loss_D: 3.0779 Loss_G: 3.5325 D(x): 0.6031 D(G(z)): 0.4175 / 0.3356\n",
"[88/100][320/391] Loss_D: 3.3720 Loss_G: 3.1267 D(x): 0.5531 D(G(z)): 0.3907 / 0.3880\n",
"[88/100][321/391] Loss_D: 2.9563 Loss_G: 2.8849 D(x): 0.6646 D(G(z)): 0.4425 / 0.4171\n",
"[88/100][322/391] Loss_D: 2.6827 Loss_G: 1.9435 D(x): 0.6694 D(G(z)): 0.4141 / 0.5245\n",
"[88/100][323/391] Loss_D: 3.6162 Loss_G: 3.0193 D(x): 0.6081 D(G(z)): 0.5268 / 0.3926\n",
"[88/100][324/391] Loss_D: 2.6176 Loss_G: 2.1816 D(x): 0.7033 D(G(z)): 0.4493 / 0.5096\n",
"[88/100][325/391] Loss_D: 2.7843 Loss_G: 1.9022 D(x): 0.7103 D(G(z)): 0.4068 / 0.5490\n",
"[88/100][326/391] Loss_D: 3.3189 Loss_G: 2.8475 D(x): 0.5796 D(G(z)): 0.4738 / 0.4071\n",
"[88/100][327/391] Loss_D: 2.8786 Loss_G: 2.9885 D(x): 0.6497 D(G(z)): 0.4502 / 0.3808\n",
"[88/100][328/391] Loss_D: 3.4841 Loss_G: 3.3220 D(x): 0.5566 D(G(z)): 0.4979 / 0.3508\n",
"[88/100][329/391] Loss_D: 2.6705 Loss_G: 2.7366 D(x): 0.7242 D(G(z)): 0.4235 / 0.4271\n",
"[88/100][330/391] Loss_D: 2.9454 Loss_G: 2.3292 D(x): 0.7048 D(G(z)): 0.4892 / 0.4890\n",
"[88/100][331/391] Loss_D: 3.4077 Loss_G: 2.4350 D(x): 0.5868 D(G(z)): 0.4269 / 0.4873\n",
"[88/100][332/391] Loss_D: 3.3015 Loss_G: 2.5969 D(x): 0.5749 D(G(z)): 0.4696 / 0.4383\n",
"[88/100][333/391] Loss_D: 3.5628 Loss_G: 3.3957 D(x): 0.5962 D(G(z)): 0.4959 / 0.3452\n",
"[88/100][334/391] Loss_D: 2.6447 Loss_G: 2.5080 D(x): 0.6678 D(G(z)): 0.4316 / 0.4520\n",
"[88/100][335/391] Loss_D: 2.9054 Loss_G: 2.9207 D(x): 0.6851 D(G(z)): 0.4564 / 0.3990\n",
"[88/100][336/391] Loss_D: 3.0782 Loss_G: 2.4719 D(x): 0.6425 D(G(z)): 0.4256 / 0.4596\n",
"[88/100][337/391] Loss_D: 2.8472 Loss_G: 2.8912 D(x): 0.6828 D(G(z)): 0.4201 / 0.4026\n",
"[88/100][338/391] Loss_D: 3.0215 Loss_G: 2.7933 D(x): 0.6559 D(G(z)): 0.4837 / 0.4257\n",
"[88/100][339/391] Loss_D: 2.8269 Loss_G: 3.1742 D(x): 0.7400 D(G(z)): 0.5114 / 0.3773\n",
"[88/100][340/391] Loss_D: 2.9462 Loss_G: 3.2316 D(x): 0.6687 D(G(z)): 0.4465 / 0.3817\n",
"[88/100][341/391] Loss_D: 3.1979 Loss_G: 2.7931 D(x): 0.5633 D(G(z)): 0.4058 / 0.4298\n",
"[88/100][342/391] Loss_D: 3.6494 Loss_G: 2.3255 D(x): 0.4768 D(G(z)): 0.4122 / 0.4948\n",
"[88/100][343/391] Loss_D: 3.5457 Loss_G: 2.7264 D(x): 0.5706 D(G(z)): 0.4592 / 0.4252\n",
"[88/100][344/391] Loss_D: 3.6642 Loss_G: 1.8664 D(x): 0.5794 D(G(z)): 0.5219 / 0.5436\n",
"[88/100][345/391] Loss_D: 2.6845 Loss_G: 2.3811 D(x): 0.6576 D(G(z)): 0.3828 / 0.4850\n",
"[88/100][346/391] Loss_D: 3.2705 Loss_G: 1.8426 D(x): 0.6019 D(G(z)): 0.4867 / 0.5594\n",
"[88/100][347/391] Loss_D: 3.1515 Loss_G: 2.5188 D(x): 0.6366 D(G(z)): 0.4635 / 0.4479\n",
"[88/100][348/391] Loss_D: 2.5448 Loss_G: 1.8526 D(x): 0.6868 D(G(z)): 0.4200 / 0.5770\n",
"[88/100][349/391] Loss_D: 2.9398 Loss_G: 2.1016 D(x): 0.6617 D(G(z)): 0.4449 / 0.5140\n",
"[88/100][350/391] Loss_D: 2.7152 Loss_G: 2.6869 D(x): 0.7109 D(G(z)): 0.4547 / 0.4452\n",
"[88/100][351/391] Loss_D: 3.6075 Loss_G: 3.1826 D(x): 0.6483 D(G(z)): 0.5516 / 0.3806\n",
"[88/100][352/391] Loss_D: 3.0212 Loss_G: 3.1515 D(x): 0.6967 D(G(z)): 0.5200 / 0.3796\n",
"[88/100][353/391] Loss_D: 2.8329 Loss_G: 2.2712 D(x): 0.6801 D(G(z)): 0.4031 / 0.4934\n",
"[88/100][354/391] Loss_D: 2.6404 Loss_G: 1.9337 D(x): 0.6313 D(G(z)): 0.3468 / 0.5483\n",
"[88/100][355/391] Loss_D: 2.8859 Loss_G: 3.3384 D(x): 0.6596 D(G(z)): 0.4489 / 0.3569\n",
"[88/100][356/391] Loss_D: 2.7355 Loss_G: 3.1505 D(x): 0.6766 D(G(z)): 0.4279 / 0.3662\n",
"[88/100][357/391] Loss_D: 3.7136 Loss_G: 2.1861 D(x): 0.4952 D(G(z)): 0.4684 / 0.5015\n",
"[88/100][358/391] Loss_D: 3.2736 Loss_G: 1.9986 D(x): 0.5794 D(G(z)): 0.4544 / 0.5398\n",
"[88/100][359/391] Loss_D: 2.8898 Loss_G: 1.9455 D(x): 0.6225 D(G(z)): 0.4226 / 0.5480\n",
"[88/100][360/391] Loss_D: 3.3524 Loss_G: 2.0573 D(x): 0.6707 D(G(z)): 0.5391 / 0.5148\n",
"[88/100][361/391] Loss_D: 3.5729 Loss_G: 2.5449 D(x): 0.7116 D(G(z)): 0.4349 / 0.4584\n",
"[88/100][362/391] Loss_D: 2.8763 Loss_G: 2.8784 D(x): 0.6615 D(G(z)): 0.4207 / 0.4181\n",
"[88/100][363/391] Loss_D: 3.2286 Loss_G: 3.0764 D(x): 0.5694 D(G(z)): 0.4382 / 0.3972\n",
"[88/100][364/391] Loss_D: 3.1732 Loss_G: 2.7718 D(x): 0.6328 D(G(z)): 0.4616 / 0.4285\n",
"[88/100][365/391] Loss_D: 2.6079 Loss_G: 2.6515 D(x): 0.7157 D(G(z)): 0.4038 / 0.4408\n",
"[88/100][366/391] Loss_D: 2.8585 Loss_G: 1.7829 D(x): 0.5739 D(G(z)): 0.3572 / 0.5690\n",
"[88/100][367/391] Loss_D: 2.8536 Loss_G: 1.9592 D(x): 0.6758 D(G(z)): 0.4091 / 0.5412\n",
"[88/100][368/391] Loss_D: 3.2123 Loss_G: 1.9994 D(x): 0.6625 D(G(z)): 0.5301 / 0.5353\n",
"[88/100][369/391] Loss_D: 2.9311 Loss_G: 2.4461 D(x): 0.6592 D(G(z)): 0.4669 / 0.4690\n",
"[88/100][370/391] Loss_D: 2.6715 Loss_G: 2.5330 D(x): 0.7280 D(G(z)): 0.4431 / 0.4669\n",
"[88/100][371/391] Loss_D: 3.0011 Loss_G: 2.9646 D(x): 0.6857 D(G(z)): 0.5043 / 0.4091\n",
"[88/100][372/391] Loss_D: 2.8513 Loss_G: 2.2423 D(x): 0.5949 D(G(z)): 0.3709 / 0.5220\n",
"[88/100][373/391] Loss_D: 3.9373 Loss_G: 2.6487 D(x): 0.5779 D(G(z)): 0.5935 / 0.4379\n",
"[88/100][374/391] Loss_D: 2.4410 Loss_G: 2.6389 D(x): 0.7051 D(G(z)): 0.3855 / 0.4484\n",
"[88/100][375/391] Loss_D: 3.1205 Loss_G: 3.0797 D(x): 0.6570 D(G(z)): 0.4807 / 0.4042\n",
"[88/100][376/391] Loss_D: 3.0809 Loss_G: 3.6812 D(x): 0.6671 D(G(z)): 0.4484 / 0.3181\n",
"[88/100][377/391] Loss_D: 2.9573 Loss_G: 3.3410 D(x): 0.5894 D(G(z)): 0.3168 / 0.3592\n",
"[88/100][378/391] Loss_D: 3.2591 Loss_G: 1.7249 D(x): 0.6089 D(G(z)): 0.5301 / 0.5755\n",
"[88/100][379/391] Loss_D: 2.8428 Loss_G: 2.1425 D(x): 0.6610 D(G(z)): 0.4493 / 0.5233\n",
"[88/100][380/391] Loss_D: 2.6921 Loss_G: 3.0378 D(x): 0.6966 D(G(z)): 0.4176 / 0.3960\n",
"[88/100][381/391] Loss_D: 3.5769 Loss_G: 3.2316 D(x): 0.6221 D(G(z)): 0.5700 / 0.3705\n",
"[88/100][382/391] Loss_D: 3.1707 Loss_G: 3.0777 D(x): 0.6425 D(G(z)): 0.5094 / 0.3880\n",
"[88/100][383/391] Loss_D: 2.6344 Loss_G: 2.2552 D(x): 0.6399 D(G(z)): 0.3161 / 0.4959\n",
"[88/100][384/391] Loss_D: 2.6360 Loss_G: 2.5018 D(x): 0.6458 D(G(z)): 0.4181 / 0.4650\n",
"[88/100][385/391] Loss_D: 3.2748 Loss_G: 2.7515 D(x): 0.6622 D(G(z)): 0.5254 / 0.4289\n",
"[88/100][386/391] Loss_D: 3.2011 Loss_G: 2.0104 D(x): 0.6123 D(G(z)): 0.4462 / 0.5284\n",
"[88/100][387/391] Loss_D: 2.8122 Loss_G: 2.5022 D(x): 0.6605 D(G(z)): 0.3957 / 0.4559\n",
"[88/100][388/391] Loss_D: 2.7328 Loss_G: 2.3364 D(x): 0.6732 D(G(z)): 0.4546 / 0.4849\n",
"[88/100][389/391] Loss_D: 3.2061 Loss_G: 3.1179 D(x): 0.6334 D(G(z)): 0.4942 / 0.3898\n",
"[88/100][390/391] Loss_D: 2.8821 Loss_G: 2.3474 D(x): 0.6235 D(G(z)): 0.4169 / 0.4965\n",
"[88/100][391/391] Loss_D: 3.7222 Loss_G: 2.5341 D(x): 0.5909 D(G(z)): 0.3710 / 0.4581\n",
"[89/100][1/391] Loss_D: 3.6653 Loss_G: 2.3071 D(x): 0.6886 D(G(z)): 0.5351 / 0.5074\n",
"[89/100][2/391] Loss_D: 2.9445 Loss_G: 2.0054 D(x): 0.6139 D(G(z)): 0.4150 / 0.5436\n",
"[89/100][3/391] Loss_D: 2.9577 Loss_G: 1.9354 D(x): 0.7096 D(G(z)): 0.4902 / 0.5502\n",
"[89/100][4/391] Loss_D: 2.5650 Loss_G: 2.6334 D(x): 0.6443 D(G(z)): 0.3767 / 0.4312\n",
"[89/100][5/391] Loss_D: 3.1824 Loss_G: 2.3975 D(x): 0.6318 D(G(z)): 0.4625 / 0.4781\n",
"[89/100][6/391] Loss_D: 2.6913 Loss_G: 3.3833 D(x): 0.6711 D(G(z)): 0.3603 / 0.3386\n",
"[89/100][7/391] Loss_D: 3.3261 Loss_G: 2.3597 D(x): 0.6295 D(G(z)): 0.4981 / 0.4698\n",
"[89/100][8/391] Loss_D: 3.1401 Loss_G: 2.9709 D(x): 0.5236 D(G(z)): 0.3842 / 0.3889\n",
"[89/100][9/391] Loss_D: 3.2891 Loss_G: 1.9146 D(x): 0.6466 D(G(z)): 0.5367 / 0.5651\n",
"[89/100][10/391] Loss_D: 2.9085 Loss_G: 2.6244 D(x): 0.6795 D(G(z)): 0.4693 / 0.4591\n",
"[89/100][11/391] Loss_D: 3.0021 Loss_G: 3.0627 D(x): 0.6234 D(G(z)): 0.4433 / 0.3795\n",
"[89/100][12/391] Loss_D: 3.4654 Loss_G: 2.3018 D(x): 0.6512 D(G(z)): 0.5584 / 0.4886\n",
"[89/100][13/391] Loss_D: 3.6100 Loss_G: 3.2411 D(x): 0.5629 D(G(z)): 0.4938 / 0.3638\n",
"[89/100][14/391] Loss_D: 3.0311 Loss_G: 2.5859 D(x): 0.5649 D(G(z)): 0.3880 / 0.4545\n",
"[89/100][15/391] Loss_D: 3.4540 Loss_G: 3.0865 D(x): 0.5251 D(G(z)): 0.4413 / 0.3778\n",
"[89/100][16/391] Loss_D: 3.2941 Loss_G: 3.1891 D(x): 0.6764 D(G(z)): 0.4940 / 0.3826\n",
"[89/100][17/391] Loss_D: 3.1455 Loss_G: 2.4870 D(x): 0.5767 D(G(z)): 0.4429 / 0.4582\n",
"[89/100][18/391] Loss_D: 2.4755 Loss_G: 2.4401 D(x): 0.7021 D(G(z)): 0.3870 / 0.4711\n",
"[89/100][19/391] Loss_D: 2.7003 Loss_G: 2.6559 D(x): 0.6969 D(G(z)): 0.4541 / 0.4310\n",
"[89/100][20/391] Loss_D: 2.8785 Loss_G: 1.8706 D(x): 0.6289 D(G(z)): 0.3958 / 0.5474\n",
"[89/100][21/391] Loss_D: 2.0952 Loss_G: 2.5570 D(x): 0.7766 D(G(z)): 0.3214 / 0.4392\n",
"[89/100][22/391] Loss_D: 3.4806 Loss_G: 2.7414 D(x): 0.5714 D(G(z)): 0.4840 / 0.4133\n",
"[89/100][23/391] Loss_D: 2.9036 Loss_G: 2.7029 D(x): 0.6901 D(G(z)): 0.5002 / 0.4307\n",
"[89/100][24/391] Loss_D: 2.8389 Loss_G: 2.6557 D(x): 0.7758 D(G(z)): 0.5087 / 0.4320\n",
"[89/100][25/391] Loss_D: 3.0114 Loss_G: 2.7816 D(x): 0.6200 D(G(z)): 0.4150 / 0.4049\n",
"[89/100][26/391] Loss_D: 2.7125 Loss_G: 3.4493 D(x): 0.6768 D(G(z)): 0.4000 / 0.3648\n",
"[89/100][27/391] Loss_D: 3.8211 Loss_G: 2.5964 D(x): 0.5296 D(G(z)): 0.4422 / 0.4281\n",
"[89/100][28/391] Loss_D: 2.6672 Loss_G: 2.8055 D(x): 0.7120 D(G(z)): 0.4653 / 0.4170\n",
"[89/100][29/391] Loss_D: 2.9153 Loss_G: 2.2625 D(x): 0.6878 D(G(z)): 0.4554 / 0.4940\n",
"[89/100][30/391] Loss_D: 3.1863 Loss_G: 2.9123 D(x): 0.5820 D(G(z)): 0.4795 / 0.4056\n",
"[89/100][31/391] Loss_D: 3.6571 Loss_G: 2.5713 D(x): 0.6727 D(G(z)): 0.4003 / 0.4729\n",
"[89/100][32/391] Loss_D: 3.0110 Loss_G: 2.7738 D(x): 0.6833 D(G(z)): 0.4919 / 0.4330\n",
"[89/100][33/391] Loss_D: 3.1771 Loss_G: 2.2788 D(x): 0.6364 D(G(z)): 0.4466 / 0.4786\n",
"[89/100][34/391] Loss_D: 3.0508 Loss_G: 2.9585 D(x): 0.5864 D(G(z)): 0.4120 / 0.4039\n",
"[89/100][35/391] Loss_D: 3.4515 Loss_G: 2.8073 D(x): 0.5505 D(G(z)): 0.4649 / 0.4159\n",
"[89/100][36/391] Loss_D: 3.2996 Loss_G: 2.4878 D(x): 0.6296 D(G(z)): 0.4771 / 0.4627\n",
"[89/100][37/391] Loss_D: 3.0323 Loss_G: 2.6253 D(x): 0.6257 D(G(z)): 0.4337 / 0.4329\n",
"[89/100][38/391] Loss_D: 2.8148 Loss_G: 2.2478 D(x): 0.6919 D(G(z)): 0.4973 / 0.4880\n",
"[89/100][39/391] Loss_D: 2.9959 Loss_G: 2.3364 D(x): 0.6679 D(G(z)): 0.4524 / 0.4764\n",
"[89/100][40/391] Loss_D: 3.0706 Loss_G: 3.7701 D(x): 0.6604 D(G(z)): 0.4322 / 0.3011\n",
"[89/100][41/391] Loss_D: 3.1104 Loss_G: 2.5389 D(x): 0.6574 D(G(z)): 0.4799 / 0.4726\n",
"[89/100][42/391] Loss_D: 2.6201 Loss_G: 3.3497 D(x): 0.6738 D(G(z)): 0.3780 / 0.3646\n",
"[89/100][43/391] Loss_D: 3.5153 Loss_G: 2.6948 D(x): 0.6855 D(G(z)): 0.5467 / 0.4303\n",
"[89/100][44/391] Loss_D: 2.6437 Loss_G: 2.6664 D(x): 0.6586 D(G(z)): 0.4133 / 0.4350\n",
"[89/100][45/391] Loss_D: 2.9994 Loss_G: 3.0728 D(x): 0.6104 D(G(z)): 0.3907 / 0.3926\n",
"[89/100][46/391] Loss_D: 2.8846 Loss_G: 2.7900 D(x): 0.6686 D(G(z)): 0.4408 / 0.4181\n",
"[89/100][47/391] Loss_D: 2.9009 Loss_G: 2.3801 D(x): 0.6169 D(G(z)): 0.4138 / 0.4572\n",
"[89/100][48/391] Loss_D: 2.7539 Loss_G: 2.2822 D(x): 0.6465 D(G(z)): 0.3557 / 0.4623\n",
"[89/100][49/391] Loss_D: 2.8531 Loss_G: 1.7424 D(x): 0.6401 D(G(z)): 0.4516 / 0.5875\n",
"[89/100][50/391] Loss_D: 3.4071 Loss_G: 2.0312 D(x): 0.5349 D(G(z)): 0.4351 / 0.5361\n",
"[89/100][51/391] Loss_D: 3.7337 Loss_G: 2.8271 D(x): 0.6685 D(G(z)): 0.5997 / 0.4076\n",
"[89/100][52/391] Loss_D: 2.5288 Loss_G: 2.8002 D(x): 0.7386 D(G(z)): 0.4159 / 0.4343\n",
"[89/100][53/391] Loss_D: 2.4873 Loss_G: 2.5962 D(x): 0.7533 D(G(z)): 0.4258 / 0.4502\n",
"[89/100][54/391] Loss_D: 2.3871 Loss_G: 2.8004 D(x): 0.7378 D(G(z)): 0.3957 / 0.4190\n",
"[89/100][55/391] Loss_D: 3.0163 Loss_G: 2.4887 D(x): 0.6716 D(G(z)): 0.4899 / 0.4608\n",
"[89/100][56/391] Loss_D: 3.0809 Loss_G: 2.8649 D(x): 0.5961 D(G(z)): 0.3960 / 0.4109\n",
"[89/100][57/391] Loss_D: 3.1953 Loss_G: 2.5582 D(x): 0.5732 D(G(z)): 0.4049 / 0.4480\n",
"[89/100][58/391] Loss_D: 2.9947 Loss_G: 2.3274 D(x): 0.6164 D(G(z)): 0.4174 / 0.4714\n",
"[89/100][59/391] Loss_D: 3.1392 Loss_G: 2.8174 D(x): 0.5600 D(G(z)): 0.3582 / 0.4282\n",
"[89/100][60/391] Loss_D: 2.6158 Loss_G: 2.8455 D(x): 0.7476 D(G(z)): 0.4134 / 0.4224\n",
"[89/100][61/391] Loss_D: 3.6816 Loss_G: 2.0336 D(x): 0.6391 D(G(z)): 0.5187 / 0.5390\n",
"[89/100][62/391] Loss_D: 2.7989 Loss_G: 2.4867 D(x): 0.7975 D(G(z)): 0.5234 / 0.4546\n",
"[89/100][63/391] Loss_D: 2.7144 Loss_G: 3.1826 D(x): 0.7164 D(G(z)): 0.4611 / 0.3744\n",
"[89/100][64/391] Loss_D: 2.4084 Loss_G: 3.5888 D(x): 0.6662 D(G(z)): 0.3338 / 0.3230\n",
"[89/100][65/391] Loss_D: 2.7064 Loss_G: 2.8724 D(x): 0.6632 D(G(z)): 0.3730 / 0.4037\n",
"[89/100][66/391] Loss_D: 2.6173 Loss_G: 2.1511 D(x): 0.6597 D(G(z)): 0.3620 / 0.5026\n",
"[89/100][67/391] Loss_D: 3.2252 Loss_G: 2.9756 D(x): 0.6470 D(G(z)): 0.4475 / 0.3915\n",
"[89/100][68/391] Loss_D: 2.6942 Loss_G: 3.0253 D(x): 0.6255 D(G(z)): 0.3611 / 0.4016\n",
"[89/100][69/391] Loss_D: 3.0847 Loss_G: 2.1831 D(x): 0.7420 D(G(z)): 0.5706 / 0.5206\n",
"[89/100][70/391] Loss_D: 2.9208 Loss_G: 3.2141 D(x): 0.6952 D(G(z)): 0.4400 / 0.3716\n",
"[89/100][71/391] Loss_D: 3.4549 Loss_G: 2.3966 D(x): 0.6009 D(G(z)): 0.4729 / 0.4835\n",
"[89/100][72/391] Loss_D: 2.5234 Loss_G: 2.4865 D(x): 0.6844 D(G(z)): 0.3682 / 0.4672\n",
"[89/100][73/391] Loss_D: 2.1941 Loss_G: 2.3180 D(x): 0.7383 D(G(z)): 0.3037 / 0.4984\n",
"[89/100][74/391] Loss_D: 2.7261 Loss_G: 2.5623 D(x): 0.6561 D(G(z)): 0.3996 / 0.4508\n",
"[89/100][75/391] Loss_D: 3.3006 Loss_G: 2.6075 D(x): 0.6587 D(G(z)): 0.5128 / 0.4671\n",
"[89/100][76/391] Loss_D: 3.4551 Loss_G: 3.4184 D(x): 0.6040 D(G(z)): 0.5195 / 0.3404\n",
"[89/100][77/391] Loss_D: 2.9187 Loss_G: 2.2976 D(x): 0.6584 D(G(z)): 0.3789 / 0.5066\n",
"[89/100][78/391] Loss_D: 2.9022 Loss_G: 2.0575 D(x): 0.6504 D(G(z)): 0.4464 / 0.5283\n",
"[89/100][79/391] Loss_D: 2.4596 Loss_G: 3.4838 D(x): 0.7163 D(G(z)): 0.4106 / 0.3488\n",
"[89/100][80/391] Loss_D: 3.2585 Loss_G: 2.9781 D(x): 0.6313 D(G(z)): 0.4617 / 0.3960\n",
"[89/100][81/391] Loss_D: 2.4804 Loss_G: 2.7953 D(x): 0.7079 D(G(z)): 0.3820 / 0.4158\n",
"[89/100][82/391] Loss_D: 3.1355 Loss_G: 3.3281 D(x): 0.6688 D(G(z)): 0.4862 / 0.3532\n",
"[89/100][83/391] Loss_D: 2.8010 Loss_G: 2.7367 D(x): 0.7097 D(G(z)): 0.4257 / 0.4220\n",
"[89/100][84/391] Loss_D: 3.1175 Loss_G: 2.8223 D(x): 0.5897 D(G(z)): 0.4656 / 0.4042\n",
"[89/100][85/391] Loss_D: 2.8632 Loss_G: 2.7687 D(x): 0.6922 D(G(z)): 0.4671 / 0.4122\n",
"[89/100][86/391] Loss_D: 3.2269 Loss_G: 3.4115 D(x): 0.6428 D(G(z)): 0.4806 / 0.3501\n",
"[89/100][87/391] Loss_D: 2.8399 Loss_G: 3.2070 D(x): 0.7062 D(G(z)): 0.4432 / 0.3655\n",
"[89/100][88/391] Loss_D: 3.3599 Loss_G: 3.5453 D(x): 0.5944 D(G(z)): 0.5289 / 0.3371\n",
"[89/100][89/391] Loss_D: 3.4159 Loss_G: 3.7023 D(x): 0.5644 D(G(z)): 0.4701 / 0.3284\n",
"[89/100][90/391] Loss_D: 2.8887 Loss_G: 2.5940 D(x): 0.6578 D(G(z)): 0.4311 / 0.4373\n",
"[89/100][91/391] Loss_D: 3.5951 Loss_G: 2.6789 D(x): 0.5740 D(G(z)): 0.3761 / 0.4440\n",
"[89/100][92/391] Loss_D: 2.8773 Loss_G: 2.4787 D(x): 0.7113 D(G(z)): 0.4901 / 0.4751\n",
"[89/100][93/391] Loss_D: 2.8126 Loss_G: 3.1740 D(x): 0.6609 D(G(z)): 0.4105 / 0.3662\n",
"[89/100][94/391] Loss_D: 3.3080 Loss_G: 3.5952 D(x): 0.6081 D(G(z)): 0.4972 / 0.3217\n",
"[89/100][95/391] Loss_D: 3.3680 Loss_G: 2.7688 D(x): 0.6082 D(G(z)): 0.4726 / 0.4145\n",
"[89/100][96/391] Loss_D: 3.0160 Loss_G: 2.4799 D(x): 0.6974 D(G(z)): 0.4827 / 0.4718\n",
"[89/100][97/391] Loss_D: 3.0810 Loss_G: 2.7200 D(x): 0.6871 D(G(z)): 0.4587 / 0.4293\n",
"[89/100][98/391] Loss_D: 2.7731 Loss_G: 2.7979 D(x): 0.6493 D(G(z)): 0.3815 / 0.4217\n",
"[89/100][99/391] Loss_D: 2.8669 Loss_G: 3.6035 D(x): 0.6348 D(G(z)): 0.3794 / 0.3273\n",
"[89/100][100/391] Loss_D: 2.8739 Loss_G: 2.2293 D(x): 0.6811 D(G(z)): 0.4492 / 0.5210\n",
"[89/100][101/391] Loss_D: 3.1572 Loss_G: 3.0505 D(x): 0.5662 D(G(z)): 0.4038 / 0.3846\n",
"[89/100][102/391] Loss_D: 3.0622 Loss_G: 3.2998 D(x): 0.5641 D(G(z)): 0.3908 / 0.3596\n",
"[89/100][103/391] Loss_D: 3.6002 Loss_G: 2.3957 D(x): 0.5851 D(G(z)): 0.5317 / 0.4780\n",
"[89/100][104/391] Loss_D: 3.1863 Loss_G: 2.7203 D(x): 0.7345 D(G(z)): 0.5488 / 0.4328\n",
"[89/100][105/391] Loss_D: 2.9208 Loss_G: 2.6764 D(x): 0.5967 D(G(z)): 0.3466 / 0.4354\n",
"[89/100][106/391] Loss_D: 2.6804 Loss_G: 3.0148 D(x): 0.6580 D(G(z)): 0.4002 / 0.4041\n",
"[89/100][107/391] Loss_D: 2.9540 Loss_G: 2.5824 D(x): 0.6903 D(G(z)): 0.4517 / 0.4245\n",
"[89/100][108/391] Loss_D: 3.0666 Loss_G: 1.8392 D(x): 0.6622 D(G(z)): 0.4827 / 0.5655\n",
"[89/100][109/391] Loss_D: 3.0096 Loss_G: 2.8989 D(x): 0.6471 D(G(z)): 0.4717 / 0.4077\n",
"[89/100][110/391] Loss_D: 3.1262 Loss_G: 2.1188 D(x): 0.6601 D(G(z)): 0.4855 / 0.5240\n",
"[89/100][111/391] Loss_D: 3.1191 Loss_G: 2.9670 D(x): 0.6757 D(G(z)): 0.4949 / 0.4050\n",
"[89/100][112/391] Loss_D: 3.4929 Loss_G: 3.5429 D(x): 0.5878 D(G(z)): 0.5142 / 0.3382\n",
"[89/100][113/391] Loss_D: 3.3555 Loss_G: 1.9246 D(x): 0.5811 D(G(z)): 0.4545 / 0.5683\n",
"[89/100][114/391] Loss_D: 2.9727 Loss_G: 2.3410 D(x): 0.6842 D(G(z)): 0.5003 / 0.4877\n",
"[89/100][115/391] Loss_D: 2.7152 Loss_G: 2.6780 D(x): 0.7091 D(G(z)): 0.4220 / 0.4486\n",
"[89/100][116/391] Loss_D: 2.8840 Loss_G: 3.4427 D(x): 0.5728 D(G(z)): 0.3098 / 0.3450\n",
"[89/100][117/391] Loss_D: 3.3048 Loss_G: 2.4714 D(x): 0.5601 D(G(z)): 0.3994 / 0.4517\n",
"[89/100][118/391] Loss_D: 2.9100 Loss_G: 3.0025 D(x): 0.6909 D(G(z)): 0.4833 / 0.3993\n",
"[89/100][119/391] Loss_D: 3.0536 Loss_G: 2.7478 D(x): 0.6956 D(G(z)): 0.5159 / 0.4108\n",
"[89/100][120/391] Loss_D: 3.0333 Loss_G: 2.7820 D(x): 0.6042 D(G(z)): 0.4071 / 0.4227\n",
"[89/100][121/391] Loss_D: 3.5272 Loss_G: 2.4281 D(x): 0.6958 D(G(z)): 0.4768 / 0.4792\n",
"[89/100][122/391] Loss_D: 3.0491 Loss_G: 2.8267 D(x): 0.5950 D(G(z)): 0.3844 / 0.4108\n",
"[89/100][123/391] Loss_D: 2.9317 Loss_G: 2.5783 D(x): 0.7264 D(G(z)): 0.4759 / 0.4603\n",
"[89/100][124/391] Loss_D: 3.2481 Loss_G: 2.7640 D(x): 0.7150 D(G(z)): 0.5725 / 0.4248\n",
"[89/100][125/391] Loss_D: 2.9279 Loss_G: 2.8353 D(x): 0.6304 D(G(z)): 0.3939 / 0.4120\n",
"[89/100][126/391] Loss_D: 2.9861 Loss_G: 3.3869 D(x): 0.6532 D(G(z)): 0.4718 / 0.3527\n",
"[89/100][127/391] Loss_D: 3.0327 Loss_G: 2.9566 D(x): 0.6093 D(G(z)): 0.3913 / 0.4160\n",
"[89/100][128/391] Loss_D: 2.6870 Loss_G: 2.5823 D(x): 0.6308 D(G(z)): 0.3912 / 0.4481\n",
"[89/100][129/391] Loss_D: 2.5841 Loss_G: 3.0546 D(x): 0.6868 D(G(z)): 0.4013 / 0.4037\n",
"[89/100][130/391] Loss_D: 3.2472 Loss_G: 2.4048 D(x): 0.6924 D(G(z)): 0.5213 / 0.4771\n",
"[89/100][131/391] Loss_D: 2.8135 Loss_G: 2.0836 D(x): 0.6676 D(G(z)): 0.4163 / 0.5498\n",
"[89/100][132/391] Loss_D: 3.2014 Loss_G: 2.9765 D(x): 0.6900 D(G(z)): 0.5346 / 0.4073\n",
"[89/100][133/391] Loss_D: 2.4968 Loss_G: 2.6357 D(x): 0.6949 D(G(z)): 0.3805 / 0.4262\n",
"[89/100][134/391] Loss_D: 2.8304 Loss_G: 2.9417 D(x): 0.6073 D(G(z)): 0.3974 / 0.3988\n",
"[89/100][135/391] Loss_D: 3.3617 Loss_G: 2.4390 D(x): 0.5166 D(G(z)): 0.4032 / 0.4597\n",
"[89/100][136/391] Loss_D: 2.8509 Loss_G: 2.2099 D(x): 0.6633 D(G(z)): 0.4219 / 0.4998\n",
"[89/100][137/391] Loss_D: 3.2400 Loss_G: 2.8158 D(x): 0.5852 D(G(z)): 0.4683 / 0.4167\n",
"[89/100][138/391] Loss_D: 2.9507 Loss_G: 1.6059 D(x): 0.6040 D(G(z)): 0.3770 / 0.6049\n",
"[89/100][139/391] Loss_D: 3.1484 Loss_G: 3.4425 D(x): 0.7099 D(G(z)): 0.5294 / 0.3402\n",
"[89/100][140/391] Loss_D: 2.8678 Loss_G: 2.7565 D(x): 0.7345 D(G(z)): 0.5208 / 0.4279\n",
"[89/100][141/391] Loss_D: 2.6807 Loss_G: 2.2958 D(x): 0.6968 D(G(z)): 0.3733 / 0.4841\n",
"[89/100][142/391] Loss_D: 2.4377 Loss_G: 2.5013 D(x): 0.7598 D(G(z)): 0.4122 / 0.4822\n",
"[89/100][143/391] Loss_D: 2.7017 Loss_G: 3.1810 D(x): 0.6885 D(G(z)): 0.3846 / 0.3774\n",
"[89/100][144/391] Loss_D: 2.6398 Loss_G: 3.6541 D(x): 0.7040 D(G(z)): 0.4608 / 0.3278\n",
"[89/100][145/391] Loss_D: 3.0278 Loss_G: 2.6525 D(x): 0.6328 D(G(z)): 0.4282 / 0.4457\n",
"[89/100][146/391] Loss_D: 3.0794 Loss_G: 2.3825 D(x): 0.6586 D(G(z)): 0.4327 / 0.4659\n",
"[89/100][147/391] Loss_D: 2.9827 Loss_G: 3.4129 D(x): 0.6559 D(G(z)): 0.4087 / 0.3392\n",
"[89/100][148/391] Loss_D: 2.4878 Loss_G: 2.6028 D(x): 0.6548 D(G(z)): 0.3431 / 0.4468\n",
"[89/100][149/391] Loss_D: 2.9221 Loss_G: 2.6119 D(x): 0.6519 D(G(z)): 0.4018 / 0.4632\n",
"[89/100][150/391] Loss_D: 2.7878 Loss_G: 2.2370 D(x): 0.6679 D(G(z)): 0.3849 / 0.5126\n",
"[89/100][151/391] Loss_D: 3.6616 Loss_G: 2.1919 D(x): 0.7383 D(G(z)): 0.4377 / 0.4973\n",
"[89/100][152/391] Loss_D: 2.7487 Loss_G: 3.3191 D(x): 0.6115 D(G(z)): 0.3712 / 0.3504\n",
"[89/100][153/391] Loss_D: 3.1036 Loss_G: 2.3596 D(x): 0.6743 D(G(z)): 0.5003 / 0.4814\n",
"[89/100][154/391] Loss_D: 2.7881 Loss_G: 2.3353 D(x): 0.5871 D(G(z)): 0.3479 / 0.4812\n",
"[89/100][155/391] Loss_D: 2.9352 Loss_G: 2.7173 D(x): 0.7335 D(G(z)): 0.4746 / 0.4345\n",
"[89/100][156/391] Loss_D: 3.1211 Loss_G: 3.3426 D(x): 0.6281 D(G(z)): 0.4517 / 0.3509\n",
"[89/100][157/391] Loss_D: 3.1285 Loss_G: 3.2782 D(x): 0.6684 D(G(z)): 0.4839 / 0.3472\n",
"[89/100][158/391] Loss_D: 3.0320 Loss_G: 2.4155 D(x): 0.6251 D(G(z)): 0.4593 / 0.4705\n",
"[89/100][159/391] Loss_D: 3.2054 Loss_G: 2.7592 D(x): 0.5986 D(G(z)): 0.4203 / 0.4188\n",
"[89/100][160/391] Loss_D: 3.4599 Loss_G: 2.3993 D(x): 0.6210 D(G(z)): 0.5306 / 0.4713\n",
"[89/100][161/391] Loss_D: 3.3806 Loss_G: 2.8444 D(x): 0.7166 D(G(z)): 0.5651 / 0.4305\n",
"[89/100][162/391] Loss_D: 3.0358 Loss_G: 2.2398 D(x): 0.6181 D(G(z)): 0.4381 / 0.5101\n",
"[89/100][163/391] Loss_D: 3.4003 Loss_G: 3.0425 D(x): 0.5922 D(G(z)): 0.4967 / 0.3833\n",
"[89/100][164/391] Loss_D: 2.8738 Loss_G: 3.0920 D(x): 0.6243 D(G(z)): 0.3968 / 0.3950\n",
"[89/100][165/391] Loss_D: 3.0690 Loss_G: 3.2177 D(x): 0.6377 D(G(z)): 0.4627 / 0.3536\n",
"[89/100][166/391] Loss_D: 3.0599 Loss_G: 2.3111 D(x): 0.6296 D(G(z)): 0.4261 / 0.4846\n",
"[89/100][167/391] Loss_D: 2.8423 Loss_G: 3.1629 D(x): 0.6975 D(G(z)): 0.4493 / 0.3564\n",
"[89/100][168/391] Loss_D: 2.6975 Loss_G: 2.3247 D(x): 0.7311 D(G(z)): 0.4503 / 0.4832\n",
"[89/100][169/391] Loss_D: 3.2025 Loss_G: 2.4839 D(x): 0.5514 D(G(z)): 0.4222 / 0.4345\n",
"[89/100][170/391] Loss_D: 2.9109 Loss_G: 2.3224 D(x): 0.6321 D(G(z)): 0.4070 / 0.4947\n",
"[89/100][171/391] Loss_D: 3.3221 Loss_G: 1.9685 D(x): 0.6499 D(G(z)): 0.5053 / 0.5530\n",
"[89/100][172/391] Loss_D: 3.1770 Loss_G: 2.6634 D(x): 0.6642 D(G(z)): 0.5121 / 0.4274\n",
"[89/100][173/391] Loss_D: 2.9104 Loss_G: 3.4332 D(x): 0.6379 D(G(z)): 0.4019 / 0.3414\n",
"[89/100][174/391] Loss_D: 3.2152 Loss_G: 3.7149 D(x): 0.6421 D(G(z)): 0.5181 / 0.3194\n",
"[89/100][175/391] Loss_D: 2.8506 Loss_G: 2.6795 D(x): 0.6400 D(G(z)): 0.3557 / 0.4529\n",
"[89/100][176/391] Loss_D: 3.1234 Loss_G: 2.4467 D(x): 0.6218 D(G(z)): 0.4370 / 0.4594\n",
"[89/100][177/391] Loss_D: 2.7423 Loss_G: 2.6357 D(x): 0.6488 D(G(z)): 0.3673 / 0.4317\n",
"[89/100][178/391] Loss_D: 2.6491 Loss_G: 2.5916 D(x): 0.6747 D(G(z)): 0.4033 / 0.4470\n",
"[89/100][179/391] Loss_D: 3.2723 Loss_G: 1.7876 D(x): 0.6309 D(G(z)): 0.4931 / 0.5702\n",
"[89/100][180/391] Loss_D: 3.0140 Loss_G: 2.6444 D(x): 0.6243 D(G(z)): 0.4002 / 0.4332\n",
"[89/100][181/391] Loss_D: 3.6104 Loss_G: 2.7552 D(x): 0.6898 D(G(z)): 0.4933 / 0.4431\n",
"[89/100][182/391] Loss_D: 2.7758 Loss_G: 2.9257 D(x): 0.6726 D(G(z)): 0.4440 / 0.4011\n",
"[89/100][183/391] Loss_D: 2.6049 Loss_G: 2.7210 D(x): 0.6950 D(G(z)): 0.4189 / 0.4203\n",
"[89/100][184/391] Loss_D: 3.4409 Loss_G: 2.4432 D(x): 0.6413 D(G(z)): 0.5419 / 0.4732\n",
"[89/100][185/391] Loss_D: 3.1657 Loss_G: 1.6305 D(x): 0.5664 D(G(z)): 0.3595 / 0.5947\n",
"[89/100][186/391] Loss_D: 3.2539 Loss_G: 3.1105 D(x): 0.7038 D(G(z)): 0.5672 / 0.3734\n",
"[89/100][187/391] Loss_D: 2.5861 Loss_G: 3.7320 D(x): 0.7096 D(G(z)): 0.3367 / 0.2995\n",
"[89/100][188/391] Loss_D: 3.1211 Loss_G: 2.5372 D(x): 0.6075 D(G(z)): 0.4729 / 0.4485\n",
"[89/100][189/391] Loss_D: 2.9650 Loss_G: 2.2114 D(x): 0.6952 D(G(z)): 0.4617 / 0.4980\n",
"[89/100][190/391] Loss_D: 2.8282 Loss_G: 2.9281 D(x): 0.6763 D(G(z)): 0.4615 / 0.4072\n",
"[89/100][191/391] Loss_D: 2.9365 Loss_G: 2.5241 D(x): 0.6807 D(G(z)): 0.4146 / 0.4531\n",
"[89/100][192/391] Loss_D: 3.2615 Loss_G: 2.6180 D(x): 0.5954 D(G(z)): 0.4910 / 0.4362\n",
"[89/100][193/391] Loss_D: 3.1624 Loss_G: 3.0670 D(x): 0.5700 D(G(z)): 0.4115 / 0.3829\n",
"[89/100][194/391] Loss_D: 2.6622 Loss_G: 2.9273 D(x): 0.6869 D(G(z)): 0.4263 / 0.3973\n",
"[89/100][195/391] Loss_D: 2.7035 Loss_G: 2.4391 D(x): 0.6797 D(G(z)): 0.3935 / 0.4683\n",
"[89/100][196/391] Loss_D: 2.7033 Loss_G: 3.0069 D(x): 0.6315 D(G(z)): 0.4152 / 0.3905\n",
"[89/100][197/391] Loss_D: 2.7693 Loss_G: 2.0478 D(x): 0.7005 D(G(z)): 0.3989 / 0.5158\n",
"[89/100][198/391] Loss_D: 3.0844 Loss_G: 1.4830 D(x): 0.5892 D(G(z)): 0.3748 / 0.6325\n",
"[89/100][199/391] Loss_D: 2.8899 Loss_G: 2.7257 D(x): 0.6213 D(G(z)): 0.3995 / 0.4357\n",
"[89/100][200/391] Loss_D: 3.1198 Loss_G: 1.9745 D(x): 0.6875 D(G(z)): 0.5084 / 0.5460\n",
"[89/100][201/391] Loss_D: 3.2478 Loss_G: 2.9915 D(x): 0.7106 D(G(z)): 0.5424 / 0.3971\n",
"[89/100][202/391] Loss_D: 2.4281 Loss_G: 2.9537 D(x): 0.7543 D(G(z)): 0.4234 / 0.4035\n",
"[89/100][203/391] Loss_D: 3.6116 Loss_G: 3.2137 D(x): 0.5733 D(G(z)): 0.5229 / 0.3641\n",
"[89/100][204/391] Loss_D: 2.8417 Loss_G: 2.4884 D(x): 0.6492 D(G(z)): 0.4584 / 0.4635\n",
"[89/100][205/391] Loss_D: 2.6647 Loss_G: 2.9454 D(x): 0.6384 D(G(z)): 0.3707 / 0.4109\n",
"[89/100][206/391] Loss_D: 3.0249 Loss_G: 2.9917 D(x): 0.7146 D(G(z)): 0.5132 / 0.4038\n",
"[89/100][207/391] Loss_D: 3.1351 Loss_G: 2.6646 D(x): 0.6081 D(G(z)): 0.4320 / 0.4180\n",
"[89/100][208/391] Loss_D: 2.7967 Loss_G: 2.7262 D(x): 0.6505 D(G(z)): 0.4621 / 0.4269\n",
"[89/100][209/391] Loss_D: 3.0051 Loss_G: 2.7966 D(x): 0.6350 D(G(z)): 0.4406 / 0.4265\n",
"[89/100][210/391] Loss_D: 2.9396 Loss_G: 3.5726 D(x): 0.6016 D(G(z)): 0.3885 / 0.3429\n",
"[89/100][211/391] Loss_D: 3.6520 Loss_G: 3.1320 D(x): 0.6718 D(G(z)): 0.4310 / 0.3871\n",
"[89/100][212/391] Loss_D: 2.9057 Loss_G: 2.4841 D(x): 0.6247 D(G(z)): 0.4044 / 0.4718\n",
"[89/100][213/391] Loss_D: 3.0567 Loss_G: 3.3419 D(x): 0.6252 D(G(z)): 0.4187 / 0.3494\n",
"[89/100][214/391] Loss_D: 2.6295 Loss_G: 3.6554 D(x): 0.7222 D(G(z)): 0.4680 / 0.3272\n",
"[89/100][215/391] Loss_D: 3.0150 Loss_G: 2.9156 D(x): 0.6107 D(G(z)): 0.3791 / 0.4119\n",
"[89/100][216/391] Loss_D: 3.1733 Loss_G: 2.5304 D(x): 0.5539 D(G(z)): 0.4024 / 0.4565\n",
"[89/100][217/391] Loss_D: 3.0678 Loss_G: 3.2993 D(x): 0.6603 D(G(z)): 0.5120 / 0.3529\n",
"[89/100][218/391] Loss_D: 2.8224 Loss_G: 3.0122 D(x): 0.6923 D(G(z)): 0.5048 / 0.4031\n",
"[89/100][219/391] Loss_D: 2.9818 Loss_G: 2.8844 D(x): 0.7046 D(G(z)): 0.5103 / 0.4061\n",
"[89/100][220/391] Loss_D: 2.7444 Loss_G: 3.0048 D(x): 0.6668 D(G(z)): 0.3938 / 0.4044\n",
"[89/100][221/391] Loss_D: 2.8091 Loss_G: 2.8347 D(x): 0.6587 D(G(z)): 0.4359 / 0.4410\n",
"[89/100][222/391] Loss_D: 2.6683 Loss_G: 3.4523 D(x): 0.6806 D(G(z)): 0.4253 / 0.3456\n",
"[89/100][223/391] Loss_D: 3.0359 Loss_G: 2.5579 D(x): 0.6843 D(G(z)): 0.5021 / 0.4632\n",
"[89/100][224/391] Loss_D: 2.5229 Loss_G: 2.7370 D(x): 0.6731 D(G(z)): 0.3859 / 0.4240\n",
"[89/100][225/391] Loss_D: 2.8154 Loss_G: 2.3007 D(x): 0.6575 D(G(z)): 0.4128 / 0.4825\n",
"[89/100][226/391] Loss_D: 2.7688 Loss_G: 2.4328 D(x): 0.6439 D(G(z)): 0.4182 / 0.4764\n",
"[89/100][227/391] Loss_D: 2.6498 Loss_G: 2.6115 D(x): 0.6766 D(G(z)): 0.3596 / 0.4395\n",
"[89/100][228/391] Loss_D: 3.3161 Loss_G: 2.3054 D(x): 0.5272 D(G(z)): 0.2759 / 0.4892\n",
"[89/100][229/391] Loss_D: 3.0877 Loss_G: 2.5210 D(x): 0.6854 D(G(z)): 0.5027 / 0.4584\n",
"[89/100][230/391] Loss_D: 2.2466 Loss_G: 2.7856 D(x): 0.7203 D(G(z)): 0.3768 / 0.4258\n",
"[89/100][231/391] Loss_D: 2.7877 Loss_G: 2.3160 D(x): 0.6649 D(G(z)): 0.4059 / 0.4977\n",
"[89/100][232/391] Loss_D: 2.9306 Loss_G: 2.5886 D(x): 0.7275 D(G(z)): 0.4850 / 0.4568\n",
"[89/100][233/391] Loss_D: 2.9737 Loss_G: 3.0710 D(x): 0.7387 D(G(z)): 0.5343 / 0.3775\n",
"[89/100][234/391] Loss_D: 2.9085 Loss_G: 3.2363 D(x): 0.6272 D(G(z)): 0.4507 / 0.3792\n",
"[89/100][235/391] Loss_D: 2.9084 Loss_G: 2.5283 D(x): 0.6309 D(G(z)): 0.4263 / 0.4709\n",
"[89/100][236/391] Loss_D: 2.7248 Loss_G: 2.7242 D(x): 0.6677 D(G(z)): 0.3728 / 0.4272\n",
"[89/100][237/391] Loss_D: 2.8419 Loss_G: 2.6587 D(x): 0.6629 D(G(z)): 0.4169 / 0.4424\n",
"[89/100][238/391] Loss_D: 3.2631 Loss_G: 2.7484 D(x): 0.5478 D(G(z)): 0.3997 / 0.4234\n",
"[89/100][239/391] Loss_D: 3.5961 Loss_G: 2.5545 D(x): 0.5685 D(G(z)): 0.5128 / 0.4494\n",
"[89/100][240/391] Loss_D: 2.7198 Loss_G: 2.3572 D(x): 0.7489 D(G(z)): 0.4440 / 0.4939\n",
"[89/100][241/391] Loss_D: 3.7325 Loss_G: 2.0879 D(x): 0.6654 D(G(z)): 0.3859 / 0.5310\n",
"[89/100][242/391] Loss_D: 2.7738 Loss_G: 2.9588 D(x): 0.7460 D(G(z)): 0.4885 / 0.4088\n",
"[89/100][243/391] Loss_D: 2.9005 Loss_G: 2.3167 D(x): 0.6412 D(G(z)): 0.4155 / 0.4918\n",
"[89/100][244/391] Loss_D: 2.1907 Loss_G: 2.8539 D(x): 0.7073 D(G(z)): 0.3108 / 0.4057\n",
"[89/100][245/391] Loss_D: 2.5424 Loss_G: 2.5594 D(x): 0.6775 D(G(z)): 0.3504 / 0.4510\n",
"[89/100][246/391] Loss_D: 3.1472 Loss_G: 2.4258 D(x): 0.6523 D(G(z)): 0.4727 / 0.4660\n",
"[89/100][247/391] Loss_D: 2.9729 Loss_G: 3.1134 D(x): 0.7451 D(G(z)): 0.5150 / 0.3757\n",
"[89/100][248/391] Loss_D: 3.1162 Loss_G: 2.3550 D(x): 0.5629 D(G(z)): 0.3628 / 0.4728\n",
"[89/100][249/391] Loss_D: 3.0487 Loss_G: 1.8626 D(x): 0.6500 D(G(z)): 0.4888 / 0.5651\n",
"[89/100][250/391] Loss_D: 2.6129 Loss_G: 2.7780 D(x): 0.7170 D(G(z)): 0.3687 / 0.4345\n",
"[89/100][251/391] Loss_D: 2.9242 Loss_G: 2.5389 D(x): 0.6944 D(G(z)): 0.4275 / 0.4726\n",
"[89/100][252/391] Loss_D: 2.8227 Loss_G: 2.8745 D(x): 0.6274 D(G(z)): 0.3671 / 0.3900\n",
"[89/100][253/391] Loss_D: 3.3067 Loss_G: 2.7162 D(x): 0.5968 D(G(z)): 0.4799 / 0.4281\n",
"[89/100][254/391] Loss_D: 3.3758 Loss_G: 2.5022 D(x): 0.6586 D(G(z)): 0.5661 / 0.4746\n",
"[89/100][255/391] Loss_D: 3.6855 Loss_G: 3.3638 D(x): 0.5799 D(G(z)): 0.5319 / 0.3597\n",
"[89/100][256/391] Loss_D: 2.7675 Loss_G: 2.7307 D(x): 0.6807 D(G(z)): 0.4363 / 0.4396\n",
"[89/100][257/391] Loss_D: 3.0930 Loss_G: 2.9698 D(x): 0.6228 D(G(z)): 0.4142 / 0.3964\n",
"[89/100][258/391] Loss_D: 2.8418 Loss_G: 2.4077 D(x): 0.6439 D(G(z)): 0.4163 / 0.4808\n",
"[89/100][259/391] Loss_D: 3.0744 Loss_G: 2.6572 D(x): 0.6403 D(G(z)): 0.4441 / 0.4498\n",
"[89/100][260/391] Loss_D: 3.3916 Loss_G: 3.0146 D(x): 0.5570 D(G(z)): 0.4587 / 0.4064\n",
"[89/100][261/391] Loss_D: 3.2120 Loss_G: 2.3352 D(x): 0.6553 D(G(z)): 0.4849 / 0.4924\n",
"[89/100][262/391] Loss_D: 2.8918 Loss_G: 2.7713 D(x): 0.6852 D(G(z)): 0.4312 / 0.4217\n",
"[89/100][263/391] Loss_D: 3.3145 Loss_G: 2.4004 D(x): 0.6133 D(G(z)): 0.4540 / 0.4630\n",
"[89/100][264/391] Loss_D: 3.0437 Loss_G: 2.4200 D(x): 0.6151 D(G(z)): 0.4847 / 0.4730\n",
"[89/100][265/391] Loss_D: 2.8844 Loss_G: 1.9429 D(x): 0.6393 D(G(z)): 0.4251 / 0.5546\n",
"[89/100][266/391] Loss_D: 3.8614 Loss_G: 2.6902 D(x): 0.5909 D(G(z)): 0.5556 / 0.4463\n",
"[89/100][267/391] Loss_D: 2.9247 Loss_G: 2.6677 D(x): 0.6551 D(G(z)): 0.4359 / 0.4392\n",
"[89/100][268/391] Loss_D: 2.9988 Loss_G: 2.2148 D(x): 0.6532 D(G(z)): 0.5038 / 0.5187\n",
"[89/100][269/391] Loss_D: 2.7348 Loss_G: 2.7292 D(x): 0.6799 D(G(z)): 0.3718 / 0.4296\n",
"[89/100][270/391] Loss_D: 2.7525 Loss_G: 2.3492 D(x): 0.6793 D(G(z)): 0.3934 / 0.4996\n",
"[89/100][271/391] Loss_D: 3.9085 Loss_G: 2.7983 D(x): 0.5065 D(G(z)): 0.5617 / 0.4222\n",
"[89/100][272/391] Loss_D: 3.8416 Loss_G: 2.7416 D(x): 0.6507 D(G(z)): 0.5964 / 0.4108\n",
"[89/100][273/391] Loss_D: 2.5913 Loss_G: 2.5172 D(x): 0.7213 D(G(z)): 0.4335 / 0.4674\n",
"[89/100][274/391] Loss_D: 2.7937 Loss_G: 3.0798 D(x): 0.6561 D(G(z)): 0.4078 / 0.3936\n",
"[89/100][275/391] Loss_D: 3.4294 Loss_G: 2.3019 D(x): 0.5310 D(G(z)): 0.4070 / 0.4797\n",
"[89/100][276/391] Loss_D: 3.0521 Loss_G: 3.0457 D(x): 0.6311 D(G(z)): 0.4408 / 0.4033\n",
"[89/100][277/391] Loss_D: 3.7174 Loss_G: 2.8273 D(x): 0.5952 D(G(z)): 0.5473 / 0.4147\n",
"[89/100][278/391] Loss_D: 2.9874 Loss_G: 2.4210 D(x): 0.6354 D(G(z)): 0.4585 / 0.4842\n",
"[89/100][279/391] Loss_D: 3.2679 Loss_G: 2.5338 D(x): 0.6338 D(G(z)): 0.5028 / 0.4708\n",
"[89/100][280/391] Loss_D: 2.9813 Loss_G: 2.5474 D(x): 0.6472 D(G(z)): 0.4609 / 0.4691\n",
"[89/100][281/391] Loss_D: 3.0673 Loss_G: 2.8429 D(x): 0.6823 D(G(z)): 0.4821 / 0.4294\n",
"[89/100][282/391] Loss_D: 3.3923 Loss_G: 2.6797 D(x): 0.6560 D(G(z)): 0.5502 / 0.4533\n",
"[89/100][283/391] Loss_D: 3.9778 Loss_G: 2.6957 D(x): 0.4728 D(G(z)): 0.4765 / 0.4319\n",
"[89/100][284/391] Loss_D: 2.7601 Loss_G: 3.2266 D(x): 0.6093 D(G(z)): 0.3352 / 0.3692\n",
"[89/100][285/391] Loss_D: 3.7332 Loss_G: 2.7624 D(x): 0.6229 D(G(z)): 0.5937 / 0.4200\n",
"[89/100][286/391] Loss_D: 2.9568 Loss_G: 2.3427 D(x): 0.6299 D(G(z)): 0.4252 / 0.4744\n",
"[89/100][287/391] Loss_D: 3.7537 Loss_G: 2.7826 D(x): 0.5803 D(G(z)): 0.5409 / 0.4222\n",
"[89/100][288/391] Loss_D: 2.6778 Loss_G: 3.0916 D(x): 0.6686 D(G(z)): 0.4486 / 0.4088\n",
"[89/100][289/391] Loss_D: 2.6442 Loss_G: 2.9162 D(x): 0.7319 D(G(z)): 0.4254 / 0.3999\n",
"[89/100][290/391] Loss_D: 3.0510 Loss_G: 2.3859 D(x): 0.5455 D(G(z)): 0.3199 / 0.4771\n",
"[89/100][291/391] Loss_D: 2.8427 Loss_G: 3.4765 D(x): 0.6918 D(G(z)): 0.4497 / 0.3423\n",
"[89/100][292/391] Loss_D: 2.8663 Loss_G: 2.2531 D(x): 0.7058 D(G(z)): 0.4284 / 0.4998\n",
"[89/100][293/391] Loss_D: 2.7271 Loss_G: 3.3040 D(x): 0.6979 D(G(z)): 0.4380 / 0.3702\n",
"[89/100][294/391] Loss_D: 2.7100 Loss_G: 2.6242 D(x): 0.6221 D(G(z)): 0.4162 / 0.4414\n",
"[89/100][295/391] Loss_D: 2.6779 Loss_G: 3.2835 D(x): 0.7511 D(G(z)): 0.4320 / 0.3664\n",
"[89/100][296/391] Loss_D: 3.1756 Loss_G: 3.3187 D(x): 0.6337 D(G(z)): 0.4643 / 0.3565\n",
"[89/100][297/391] Loss_D: 3.0572 Loss_G: 2.8633 D(x): 0.6535 D(G(z)): 0.3936 / 0.4288\n",
"[89/100][298/391] Loss_D: 3.1385 Loss_G: 2.1056 D(x): 0.6067 D(G(z)): 0.4388 / 0.5205\n",
"[89/100][299/391] Loss_D: 2.7893 Loss_G: 2.4736 D(x): 0.6733 D(G(z)): 0.4198 / 0.4728\n",
"[89/100][300/391] Loss_D: 3.2512 Loss_G: 2.8398 D(x): 0.6567 D(G(z)): 0.5184 / 0.4287\n",
"[89/100][301/391] Loss_D: 3.7546 Loss_G: 3.5139 D(x): 0.7903 D(G(z)): 0.4345 / 0.3553\n",
"[89/100][302/391] Loss_D: 2.7924 Loss_G: 2.3853 D(x): 0.6148 D(G(z)): 0.3794 / 0.4801\n",
"[89/100][303/391] Loss_D: 3.5197 Loss_G: 2.5073 D(x): 0.6380 D(G(z)): 0.5601 / 0.4659\n",
"[89/100][304/391] Loss_D: 2.9014 Loss_G: 2.0065 D(x): 0.6090 D(G(z)): 0.4754 / 0.5358\n",
"[89/100][305/391] Loss_D: 3.0384 Loss_G: 2.7957 D(x): 0.6712 D(G(z)): 0.4743 / 0.4157\n",
"[89/100][306/391] Loss_D: 3.1358 Loss_G: 3.0218 D(x): 0.5360 D(G(z)): 0.3472 / 0.3880\n",
"[89/100][307/391] Loss_D: 3.0261 Loss_G: 2.9799 D(x): 0.7258 D(G(z)): 0.5238 / 0.4040\n",
"[89/100][308/391] Loss_D: 3.0145 Loss_G: 2.4166 D(x): 0.5919 D(G(z)): 0.3386 / 0.4670\n",
"[89/100][309/391] Loss_D: 2.9639 Loss_G: 2.3634 D(x): 0.6440 D(G(z)): 0.4544 / 0.4784\n",
"[89/100][310/391] Loss_D: 3.8458 Loss_G: 2.4979 D(x): 0.6429 D(G(z)): 0.5970 / 0.4783\n",
"[89/100][311/391] Loss_D: 3.1252 Loss_G: 2.5209 D(x): 0.6439 D(G(z)): 0.4467 / 0.4605\n",
"[89/100][312/391] Loss_D: 2.4551 Loss_G: 3.2723 D(x): 0.7270 D(G(z)): 0.3799 / 0.3640\n",
"[89/100][313/391] Loss_D: 2.7015 Loss_G: 2.6688 D(x): 0.6713 D(G(z)): 0.3941 / 0.4487\n",
"[89/100][314/391] Loss_D: 3.0953 Loss_G: 2.4652 D(x): 0.5987 D(G(z)): 0.4432 / 0.4696\n",
"[89/100][315/391] Loss_D: 2.7422 Loss_G: 2.9895 D(x): 0.6603 D(G(z)): 0.3815 / 0.4055\n",
"[89/100][316/391] Loss_D: 3.0827 Loss_G: 2.6808 D(x): 0.6437 D(G(z)): 0.4418 / 0.4405\n",
"[89/100][317/391] Loss_D: 3.4895 Loss_G: 2.3207 D(x): 0.6334 D(G(z)): 0.5436 / 0.4817\n",
"[89/100][318/391] Loss_D: 2.7056 Loss_G: 2.4384 D(x): 0.6096 D(G(z)): 0.3420 / 0.4731\n",
"[89/100][319/391] Loss_D: 2.9356 Loss_G: 2.6770 D(x): 0.6110 D(G(z)): 0.4149 / 0.4611\n",
"[89/100][320/391] Loss_D: 3.1006 Loss_G: 2.7455 D(x): 0.6283 D(G(z)): 0.4451 / 0.4321\n",
"[89/100][321/391] Loss_D: 3.2680 Loss_G: 2.3342 D(x): 0.7057 D(G(z)): 0.5341 / 0.4698\n",
"[89/100][322/391] Loss_D: 3.2614 Loss_G: 2.6034 D(x): 0.6656 D(G(z)): 0.5374 / 0.4299\n",
"[89/100][323/391] Loss_D: 3.0931 Loss_G: 2.2460 D(x): 0.6367 D(G(z)): 0.4436 / 0.5094\n",
"[89/100][324/391] Loss_D: 2.7733 Loss_G: 1.9759 D(x): 0.6117 D(G(z)): 0.3960 / 0.5475\n",
"[89/100][325/391] Loss_D: 3.1651 Loss_G: 2.9635 D(x): 0.6751 D(G(z)): 0.4836 / 0.4079\n",
"[89/100][326/391] Loss_D: 3.0863 Loss_G: 2.9709 D(x): 0.6928 D(G(z)): 0.5013 / 0.3842\n",
"[89/100][327/391] Loss_D: 2.9297 Loss_G: 2.2708 D(x): 0.6974 D(G(z)): 0.4662 / 0.5006\n",
"[89/100][328/391] Loss_D: 2.6815 Loss_G: 3.1796 D(x): 0.6872 D(G(z)): 0.4544 / 0.3822\n",
"[89/100][329/391] Loss_D: 2.6854 Loss_G: 2.7377 D(x): 0.6489 D(G(z)): 0.2759 / 0.4475\n",
"[89/100][330/391] Loss_D: 4.0084 Loss_G: 2.6419 D(x): 0.5970 D(G(z)): 0.5983 / 0.4408\n",
"[89/100][331/391] Loss_D: 3.4275 Loss_G: 3.3199 D(x): 0.6205 D(G(z)): 0.4744 / 0.3659\n",
"[89/100][332/391] Loss_D: 2.8854 Loss_G: 3.9789 D(x): 0.7023 D(G(z)): 0.4939 / 0.3025\n",
"[89/100][333/391] Loss_D: 2.8593 Loss_G: 3.5126 D(x): 0.6399 D(G(z)): 0.3310 / 0.3377\n",
"[89/100][334/391] Loss_D: 2.2983 Loss_G: 2.6310 D(x): 0.6928 D(G(z)): 0.3506 / 0.4312\n",
"[89/100][335/391] Loss_D: 3.4083 Loss_G: 3.5063 D(x): 0.5910 D(G(z)): 0.5114 / 0.3336\n",
"[89/100][336/391] Loss_D: 2.9348 Loss_G: 3.3900 D(x): 0.6606 D(G(z)): 0.4033 / 0.3559\n",
"[89/100][337/391] Loss_D: 2.9777 Loss_G: 2.3522 D(x): 0.5949 D(G(z)): 0.3577 / 0.5008\n",
"[89/100][338/391] Loss_D: 2.8704 Loss_G: 2.6308 D(x): 0.6503 D(G(z)): 0.3993 / 0.4437\n",
"[89/100][339/391] Loss_D: 3.0189 Loss_G: 3.2347 D(x): 0.6923 D(G(z)): 0.4810 / 0.3833\n",
"[89/100][340/391] Loss_D: 2.7980 Loss_G: 2.2770 D(x): 0.6348 D(G(z)): 0.3775 / 0.4876\n",
"[89/100][341/391] Loss_D: 2.8371 Loss_G: 2.9289 D(x): 0.6543 D(G(z)): 0.4319 / 0.4129\n",
"[89/100][342/391] Loss_D: 3.3391 Loss_G: 2.2777 D(x): 0.6630 D(G(z)): 0.5380 / 0.4948\n",
"[89/100][343/391] Loss_D: 2.9845 Loss_G: 2.8319 D(x): 0.6529 D(G(z)): 0.4451 / 0.4242\n",
"[89/100][344/391] Loss_D: 3.2549 Loss_G: 2.4267 D(x): 0.6319 D(G(z)): 0.5256 / 0.4797\n",
"[89/100][345/391] Loss_D: 3.1647 Loss_G: 2.6638 D(x): 0.6318 D(G(z)): 0.4572 / 0.4378\n",
"[89/100][346/391] Loss_D: 3.3013 Loss_G: 2.1547 D(x): 0.6488 D(G(z)): 0.5055 / 0.5089\n",
"[89/100][347/391] Loss_D: 3.4579 Loss_G: 2.7933 D(x): 0.6036 D(G(z)): 0.4943 / 0.4080\n",
"[89/100][348/391] Loss_D: 2.8876 Loss_G: 3.0786 D(x): 0.6076 D(G(z)): 0.3863 / 0.3845\n",
"[89/100][349/391] Loss_D: 3.3411 Loss_G: 3.5022 D(x): 0.5779 D(G(z)): 0.4660 / 0.3578\n",
"[89/100][350/391] Loss_D: 2.7706 Loss_G: 2.8250 D(x): 0.6704 D(G(z)): 0.4500 / 0.4348\n",
"[89/100][351/391] Loss_D: 3.0318 Loss_G: 2.6083 D(x): 0.6064 D(G(z)): 0.4074 / 0.4544\n",
"[89/100][352/391] Loss_D: 3.1754 Loss_G: 1.9466 D(x): 0.6593 D(G(z)): 0.5385 / 0.5651\n",
"[89/100][353/391] Loss_D: 3.1466 Loss_G: 3.1580 D(x): 0.6149 D(G(z)): 0.4192 / 0.3755\n",
"[89/100][354/391] Loss_D: 3.2173 Loss_G: 2.8372 D(x): 0.5925 D(G(z)): 0.4757 / 0.4170\n",
"[89/100][355/391] Loss_D: 2.5438 Loss_G: 3.2130 D(x): 0.7555 D(G(z)): 0.4551 / 0.3732\n",
"[89/100][356/391] Loss_D: 3.2205 Loss_G: 3.1699 D(x): 0.6780 D(G(z)): 0.5330 / 0.3690\n",
"[89/100][357/391] Loss_D: 2.8866 Loss_G: 3.6247 D(x): 0.6703 D(G(z)): 0.4774 / 0.3194\n",
"[89/100][358/391] Loss_D: 2.9365 Loss_G: 3.6811 D(x): 0.5918 D(G(z)): 0.3854 / 0.3234\n",
"[89/100][359/391] Loss_D: 3.1412 Loss_G: 2.5227 D(x): 0.5545 D(G(z)): 0.3511 / 0.4663\n",
"[89/100][360/391] Loss_D: 3.1970 Loss_G: 2.8614 D(x): 0.5859 D(G(z)): 0.4280 / 0.4186\n",
"[89/100][361/391] Loss_D: 3.5993 Loss_G: 1.6521 D(x): 0.5872 D(G(z)): 0.4887 / 0.6134\n",
"[89/100][362/391] Loss_D: 2.8407 Loss_G: 1.4899 D(x): 0.7324 D(G(z)): 0.4605 / 0.6424\n",
"[89/100][363/391] Loss_D: 2.6857 Loss_G: 3.2011 D(x): 0.7639 D(G(z)): 0.4682 / 0.3668\n",
"[89/100][364/391] Loss_D: 3.4495 Loss_G: 2.5646 D(x): 0.5707 D(G(z)): 0.4203 / 0.4435\n",
"[89/100][365/391] Loss_D: 3.0484 Loss_G: 2.6743 D(x): 0.6504 D(G(z)): 0.4682 / 0.4332\n",
"[89/100][366/391] Loss_D: 2.6295 Loss_G: 2.8282 D(x): 0.7015 D(G(z)): 0.4200 / 0.4254\n",
"[89/100][367/391] Loss_D: 2.6587 Loss_G: 3.6941 D(x): 0.7465 D(G(z)): 0.4172 / 0.3117\n",
"[89/100][368/391] Loss_D: 2.8675 Loss_G: 3.3972 D(x): 0.6176 D(G(z)): 0.3731 / 0.3437\n",
"[89/100][369/391] Loss_D: 2.7679 Loss_G: 2.8011 D(x): 0.6470 D(G(z)): 0.4374 / 0.4142\n",
"[89/100][370/391] Loss_D: 2.7893 Loss_G: 2.9860 D(x): 0.5986 D(G(z)): 0.3062 / 0.4069\n",
"[89/100][371/391] Loss_D: 2.9657 Loss_G: 3.2583 D(x): 0.6342 D(G(z)): 0.4223 / 0.3706\n",
"[89/100][372/391] Loss_D: 2.5902 Loss_G: 2.0419 D(x): 0.6559 D(G(z)): 0.3913 / 0.5380\n",
"[89/100][373/391] Loss_D: 3.2885 Loss_G: 2.6412 D(x): 0.7317 D(G(z)): 0.5389 / 0.4403\n",
"[89/100][374/391] Loss_D: 2.7284 Loss_G: 2.7394 D(x): 0.6965 D(G(z)): 0.4446 / 0.4377\n",
"[89/100][375/391] Loss_D: 3.2162 Loss_G: 2.6693 D(x): 0.7072 D(G(z)): 0.5415 / 0.4318\n",
"[89/100][376/391] Loss_D: 2.7901 Loss_G: 3.6947 D(x): 0.7095 D(G(z)): 0.4117 / 0.3269\n",
"[89/100][377/391] Loss_D: 3.4126 Loss_G: 2.8036 D(x): 0.5777 D(G(z)): 0.4617 / 0.4156\n",
"[89/100][378/391] Loss_D: 2.9223 Loss_G: 1.8509 D(x): 0.5787 D(G(z)): 0.3645 / 0.5771\n",
"[89/100][379/391] Loss_D: 2.9334 Loss_G: 2.6313 D(x): 0.5930 D(G(z)): 0.3890 / 0.4484\n",
"[89/100][380/391] Loss_D: 2.8851 Loss_G: 2.7027 D(x): 0.7400 D(G(z)): 0.4991 / 0.4372\n",
"[89/100][381/391] Loss_D: 2.9780 Loss_G: 2.7080 D(x): 0.6458 D(G(z)): 0.4463 / 0.4149\n",
"[89/100][382/391] Loss_D: 2.8337 Loss_G: 2.3396 D(x): 0.6027 D(G(z)): 0.3813 / 0.4946\n",
"[89/100][383/391] Loss_D: 2.8143 Loss_G: 2.2519 D(x): 0.6881 D(G(z)): 0.4156 / 0.5018\n",
"[89/100][384/391] Loss_D: 3.1125 Loss_G: 2.6467 D(x): 0.6489 D(G(z)): 0.5234 / 0.4217\n",
"[89/100][385/391] Loss_D: 2.8389 Loss_G: 2.7213 D(x): 0.7341 D(G(z)): 0.4998 / 0.4436\n",
"[89/100][386/391] Loss_D: 2.4539 Loss_G: 2.8500 D(x): 0.7663 D(G(z)): 0.3582 / 0.4188\n",
"[89/100][387/391] Loss_D: 3.1017 Loss_G: 2.9701 D(x): 0.7112 D(G(z)): 0.5228 / 0.3911\n",
"[89/100][388/391] Loss_D: 2.9119 Loss_G: 3.2092 D(x): 0.6089 D(G(z)): 0.3876 / 0.3785\n",
"[89/100][389/391] Loss_D: 2.7750 Loss_G: 2.7581 D(x): 0.6567 D(G(z)): 0.3641 / 0.4220\n",
"[89/100][390/391] Loss_D: 2.8751 Loss_G: 2.6358 D(x): 0.6106 D(G(z)): 0.4131 / 0.4547\n",
"[89/100][391/391] Loss_D: 3.6776 Loss_G: 2.6207 D(x): 0.6770 D(G(z)): 0.3518 / 0.4527\n",
"[90/100][1/391] Loss_D: 3.5756 Loss_G: 3.0115 D(x): 0.6701 D(G(z)): 0.4733 / 0.3923\n",
"[90/100][2/391] Loss_D: 2.6073 Loss_G: 2.6259 D(x): 0.6954 D(G(z)): 0.3996 / 0.4449\n",
"[90/100][3/391] Loss_D: 2.7042 Loss_G: 2.4369 D(x): 0.6793 D(G(z)): 0.3991 / 0.4619\n",
"[90/100][4/391] Loss_D: 2.8330 Loss_G: 3.5127 D(x): 0.6862 D(G(z)): 0.4784 / 0.3428\n",
"[90/100][5/391] Loss_D: 2.8726 Loss_G: 2.2433 D(x): 0.6368 D(G(z)): 0.4158 / 0.4759\n",
"[90/100][6/391] Loss_D: 2.8982 Loss_G: 3.1601 D(x): 0.6173 D(G(z)): 0.3789 / 0.3612\n",
"[90/100][7/391] Loss_D: 3.4197 Loss_G: 2.7024 D(x): 0.6105 D(G(z)): 0.4951 / 0.4332\n",
"[90/100][8/391] Loss_D: 2.9492 Loss_G: 3.4112 D(x): 0.6088 D(G(z)): 0.4536 / 0.3368\n",
"[90/100][9/391] Loss_D: 2.6817 Loss_G: 3.0581 D(x): 0.7295 D(G(z)): 0.4540 / 0.3907\n",
"[90/100][10/391] Loss_D: 3.0087 Loss_G: 2.2595 D(x): 0.6620 D(G(z)): 0.5203 / 0.4990\n",
"[90/100][11/391] Loss_D: 3.7330 Loss_G: 2.7405 D(x): 0.5204 D(G(z)): 0.4764 / 0.4258\n",
"[90/100][12/391] Loss_D: 2.6187 Loss_G: 2.4771 D(x): 0.6862 D(G(z)): 0.3860 / 0.4670\n",
"[90/100][13/391] Loss_D: 2.7078 Loss_G: 2.7261 D(x): 0.6874 D(G(z)): 0.4308 / 0.4195\n",
"[90/100][14/391] Loss_D: 3.4423 Loss_G: 3.1130 D(x): 0.6402 D(G(z)): 0.5388 / 0.3795\n",
"[90/100][15/391] Loss_D: 2.8249 Loss_G: 2.2612 D(x): 0.6203 D(G(z)): 0.4268 / 0.4999\n",
"[90/100][16/391] Loss_D: 2.8210 Loss_G: 3.1306 D(x): 0.7870 D(G(z)): 0.4417 / 0.3725\n",
"[90/100][17/391] Loss_D: 3.0239 Loss_G: 2.6243 D(x): 0.6587 D(G(z)): 0.4735 / 0.4438\n",
"[90/100][18/391] Loss_D: 3.2426 Loss_G: 3.2258 D(x): 0.5529 D(G(z)): 0.3961 / 0.3759\n",
"[90/100][19/391] Loss_D: 2.7313 Loss_G: 2.9246 D(x): 0.6354 D(G(z)): 0.3561 / 0.4053\n",
"[90/100][20/391] Loss_D: 3.3093 Loss_G: 2.9735 D(x): 0.6391 D(G(z)): 0.5043 / 0.4057\n",
"[90/100][21/391] Loss_D: 3.3225 Loss_G: 2.8502 D(x): 0.6365 D(G(z)): 0.5689 / 0.4135\n",
"[90/100][22/391] Loss_D: 2.6146 Loss_G: 2.3027 D(x): 0.6870 D(G(z)): 0.3935 / 0.4903\n",
"[90/100][23/391] Loss_D: 2.8280 Loss_G: 3.0150 D(x): 0.6067 D(G(z)): 0.3178 / 0.3906\n",
"[90/100][24/391] Loss_D: 3.2009 Loss_G: 3.1044 D(x): 0.5442 D(G(z)): 0.3525 / 0.3752\n",
"[90/100][25/391] Loss_D: 2.8476 Loss_G: 2.7122 D(x): 0.7319 D(G(z)): 0.4886 / 0.4492\n",
"[90/100][26/391] Loss_D: 3.2908 Loss_G: 2.2851 D(x): 0.5675 D(G(z)): 0.4491 / 0.4909\n",
"[90/100][27/391] Loss_D: 2.8632 Loss_G: 2.6717 D(x): 0.6977 D(G(z)): 0.3858 / 0.4417\n",
"[90/100][28/391] Loss_D: 3.4399 Loss_G: 2.6851 D(x): 0.5739 D(G(z)): 0.5149 / 0.4418\n",
"[90/100][29/391] Loss_D: 2.9746 Loss_G: 2.5585 D(x): 0.7249 D(G(z)): 0.5268 / 0.4608\n",
"[90/100][30/391] Loss_D: 2.9799 Loss_G: 2.8790 D(x): 0.6583 D(G(z)): 0.4778 / 0.4178\n",
"[90/100][31/391] Loss_D: 3.8541 Loss_G: 2.8970 D(x): 0.7681 D(G(z)): 0.4839 / 0.4252\n",
"[90/100][32/391] Loss_D: 2.7161 Loss_G: 2.5405 D(x): 0.6404 D(G(z)): 0.3381 / 0.4542\n",
"[90/100][33/391] Loss_D: 3.2703 Loss_G: 3.0193 D(x): 0.5955 D(G(z)): 0.4596 / 0.3950\n",
"[90/100][34/391] Loss_D: 2.5564 Loss_G: 2.3839 D(x): 0.6931 D(G(z)): 0.4336 / 0.4738\n",
"[90/100][35/391] Loss_D: 3.2560 Loss_G: 2.7727 D(x): 0.5801 D(G(z)): 0.4787 / 0.4146\n",
"[90/100][36/391] Loss_D: 3.0628 Loss_G: 2.5786 D(x): 0.6429 D(G(z)): 0.4285 / 0.4276\n",
"[90/100][37/391] Loss_D: 2.7335 Loss_G: 2.6330 D(x): 0.7086 D(G(z)): 0.4401 / 0.4356\n",
"[90/100][38/391] Loss_D: 2.1942 Loss_G: 2.6567 D(x): 0.7549 D(G(z)): 0.3867 / 0.4569\n",
"[90/100][39/391] Loss_D: 3.0966 Loss_G: 3.3218 D(x): 0.6233 D(G(z)): 0.4035 / 0.3643\n",
"[90/100][40/391] Loss_D: 2.8146 Loss_G: 1.8297 D(x): 0.7226 D(G(z)): 0.4497 / 0.5771\n",
"[90/100][41/391] Loss_D: 3.4034 Loss_G: 2.1728 D(x): 0.6349 D(G(z)): 0.5163 / 0.5009\n",
"[90/100][42/391] Loss_D: 3.2471 Loss_G: 2.7675 D(x): 0.5948 D(G(z)): 0.4558 / 0.4218\n",
"[90/100][43/391] Loss_D: 2.8560 Loss_G: 2.7914 D(x): 0.7119 D(G(z)): 0.4588 / 0.4341\n",
"[90/100][44/391] Loss_D: 2.9954 Loss_G: 2.9009 D(x): 0.5782 D(G(z)): 0.4116 / 0.4131\n",
"[90/100][45/391] Loss_D: 2.6603 Loss_G: 2.3018 D(x): 0.7009 D(G(z)): 0.4064 / 0.4903\n",
"[90/100][46/391] Loss_D: 2.7200 Loss_G: 2.0138 D(x): 0.6937 D(G(z)): 0.4220 / 0.5312\n",
"[90/100][47/391] Loss_D: 2.9418 Loss_G: 2.7195 D(x): 0.6520 D(G(z)): 0.4662 / 0.4313\n",
"[90/100][48/391] Loss_D: 3.0792 Loss_G: 3.2805 D(x): 0.5736 D(G(z)): 0.4000 / 0.3641\n",
"[90/100][49/391] Loss_D: 3.1048 Loss_G: 2.6273 D(x): 0.5750 D(G(z)): 0.4061 / 0.4302\n",
"[90/100][50/391] Loss_D: 3.0722 Loss_G: 2.5519 D(x): 0.7167 D(G(z)): 0.5520 / 0.4696\n",
"[90/100][51/391] Loss_D: 3.6808 Loss_G: 2.7608 D(x): 0.6174 D(G(z)): 0.5512 / 0.4362\n",
"[90/100][52/391] Loss_D: 2.5912 Loss_G: 2.9262 D(x): 0.6610 D(G(z)): 0.3399 / 0.4143\n",
"[90/100][53/391] Loss_D: 3.0507 Loss_G: 2.8641 D(x): 0.5868 D(G(z)): 0.4081 / 0.4141\n",
"[90/100][54/391] Loss_D: 3.3030 Loss_G: 2.0353 D(x): 0.5553 D(G(z)): 0.4284 / 0.5177\n",
"[90/100][55/391] Loss_D: 2.7211 Loss_G: 2.1213 D(x): 0.7126 D(G(z)): 0.4681 / 0.5158\n",
"[90/100][56/391] Loss_D: 2.9125 Loss_G: 2.1980 D(x): 0.6595 D(G(z)): 0.3911 / 0.4997\n",
"[90/100][57/391] Loss_D: 2.7203 Loss_G: 3.3155 D(x): 0.7671 D(G(z)): 0.4701 / 0.3544\n",
"[90/100][58/391] Loss_D: 2.9358 Loss_G: 3.0886 D(x): 0.6009 D(G(z)): 0.4168 / 0.3846\n",
"[90/100][59/391] Loss_D: 2.6466 Loss_G: 2.4701 D(x): 0.7056 D(G(z)): 0.4309 / 0.4601\n",
"[90/100][60/391] Loss_D: 2.9824 Loss_G: 2.7440 D(x): 0.6436 D(G(z)): 0.4170 / 0.4361\n",
"[90/100][61/391] Loss_D: 3.6942 Loss_G: 2.5810 D(x): 0.6800 D(G(z)): 0.4923 / 0.4611\n",
"[90/100][62/391] Loss_D: 3.0542 Loss_G: 2.7184 D(x): 0.6536 D(G(z)): 0.4744 / 0.4299\n",
"[90/100][63/391] Loss_D: 3.1957 Loss_G: 2.2655 D(x): 0.6442 D(G(z)): 0.4987 / 0.5046\n",
"[90/100][64/391] Loss_D: 2.8150 Loss_G: 3.5811 D(x): 0.6326 D(G(z)): 0.4316 / 0.3238\n",
"[90/100][65/391] Loss_D: 3.2505 Loss_G: 2.4698 D(x): 0.5964 D(G(z)): 0.4417 / 0.4678\n",
"[90/100][66/391] Loss_D: 2.9940 Loss_G: 2.7306 D(x): 0.6026 D(G(z)): 0.4111 / 0.4335\n",
"[90/100][67/391] Loss_D: 2.9380 Loss_G: 2.8102 D(x): 0.6387 D(G(z)): 0.3287 / 0.4166\n",
"[90/100][68/391] Loss_D: 2.8960 Loss_G: 2.6463 D(x): 0.6228 D(G(z)): 0.4117 / 0.4588\n",
"[90/100][69/391] Loss_D: 2.9718 Loss_G: 1.9462 D(x): 0.7472 D(G(z)): 0.5259 / 0.5393\n",
"[90/100][70/391] Loss_D: 2.9336 Loss_G: 2.4247 D(x): 0.6316 D(G(z)): 0.3856 / 0.4858\n",
"[90/100][71/391] Loss_D: 3.0987 Loss_G: 2.7405 D(x): 0.6868 D(G(z)): 0.4716 / 0.4350\n",
"[90/100][72/391] Loss_D: 2.3322 Loss_G: 2.6799 D(x): 0.7005 D(G(z)): 0.3400 / 0.4425\n",
"[90/100][73/391] Loss_D: 3.4055 Loss_G: 2.3849 D(x): 0.6544 D(G(z)): 0.5755 / 0.5001\n",
"[90/100][74/391] Loss_D: 2.8216 Loss_G: 2.9911 D(x): 0.6085 D(G(z)): 0.3658 / 0.3964\n",
"[90/100][75/391] Loss_D: 2.9381 Loss_G: 3.4335 D(x): 0.6494 D(G(z)): 0.4611 / 0.3405\n",
"[90/100][76/391] Loss_D: 2.9566 Loss_G: 2.4173 D(x): 0.5936 D(G(z)): 0.3843 / 0.4758\n",
"[90/100][77/391] Loss_D: 3.3380 Loss_G: 2.6304 D(x): 0.5805 D(G(z)): 0.4491 / 0.4386\n",
"[90/100][78/391] Loss_D: 3.5572 Loss_G: 2.6266 D(x): 0.6245 D(G(z)): 0.5878 / 0.4483\n",
"[90/100][79/391] Loss_D: 2.8969 Loss_G: 2.1995 D(x): 0.6777 D(G(z)): 0.4669 / 0.5238\n",
"[90/100][80/391] Loss_D: 3.0220 Loss_G: 3.3082 D(x): 0.6914 D(G(z)): 0.4654 / 0.3717\n",
"[90/100][81/391] Loss_D: 3.2092 Loss_G: 3.3400 D(x): 0.6538 D(G(z)): 0.5328 / 0.3657\n",
"[90/100][82/391] Loss_D: 2.9779 Loss_G: 2.7690 D(x): 0.7076 D(G(z)): 0.4870 / 0.4339\n",
"[90/100][83/391] Loss_D: 3.2618 Loss_G: 3.4228 D(x): 0.5871 D(G(z)): 0.4612 / 0.3494\n",
"[90/100][84/391] Loss_D: 2.8011 Loss_G: 2.7360 D(x): 0.6480 D(G(z)): 0.4289 / 0.4162\n",
"[90/100][85/391] Loss_D: 2.4667 Loss_G: 3.3909 D(x): 0.7448 D(G(z)): 0.3916 / 0.3538\n",
"[90/100][86/391] Loss_D: 3.4493 Loss_G: 3.2519 D(x): 0.6037 D(G(z)): 0.5015 / 0.3727\n",
"[90/100][87/391] Loss_D: 3.0739 Loss_G: 2.9534 D(x): 0.6153 D(G(z)): 0.3280 / 0.3938\n",
"[90/100][88/391] Loss_D: 3.8822 Loss_G: 2.4460 D(x): 0.5019 D(G(z)): 0.4715 / 0.4704\n",
"[90/100][89/391] Loss_D: 3.0444 Loss_G: 2.6814 D(x): 0.6419 D(G(z)): 0.4473 / 0.4502\n",
"[90/100][90/391] Loss_D: 2.6950 Loss_G: 2.8660 D(x): 0.7281 D(G(z)): 0.4446 / 0.4300\n",
"[90/100][91/391] Loss_D: 3.5458 Loss_G: 2.9236 D(x): 0.7377 D(G(z)): 0.4370 / 0.4278\n",
"[90/100][92/391] Loss_D: 2.5448 Loss_G: 2.8150 D(x): 0.7165 D(G(z)): 0.4330 / 0.4111\n",
"[90/100][93/391] Loss_D: 3.5639 Loss_G: 2.9507 D(x): 0.5959 D(G(z)): 0.5272 / 0.4018\n",
"[90/100][94/391] Loss_D: 2.8269 Loss_G: 3.0502 D(x): 0.5803 D(G(z)): 0.3568 / 0.3892\n",
"[90/100][95/391] Loss_D: 3.3423 Loss_G: 3.0116 D(x): 0.5556 D(G(z)): 0.4092 / 0.3895\n",
"[90/100][96/391] Loss_D: 2.7438 Loss_G: 2.3247 D(x): 0.7145 D(G(z)): 0.4478 / 0.4916\n",
"[90/100][97/391] Loss_D: 2.6471 Loss_G: 2.2812 D(x): 0.7343 D(G(z)): 0.3482 / 0.4843\n",
"[90/100][98/391] Loss_D: 3.0529 Loss_G: 3.3950 D(x): 0.6585 D(G(z)): 0.4766 / 0.3545\n",
"[90/100][99/391] Loss_D: 3.2719 Loss_G: 2.7507 D(x): 0.6286 D(G(z)): 0.4765 / 0.4312\n",
"[90/100][100/391] Loss_D: 3.1922 Loss_G: 2.4264 D(x): 0.5578 D(G(z)): 0.4144 / 0.4546\n",
"[90/100][101/391] Loss_D: 2.5263 Loss_G: 2.0182 D(x): 0.7067 D(G(z)): 0.3954 / 0.5269\n",
"[90/100][102/391] Loss_D: 2.9309 Loss_G: 3.0700 D(x): 0.6492 D(G(z)): 0.3808 / 0.3905\n",
"[90/100][103/391] Loss_D: 3.4428 Loss_G: 2.4410 D(x): 0.5631 D(G(z)): 0.5043 / 0.4710\n",
"[90/100][104/391] Loss_D: 3.1663 Loss_G: 2.9171 D(x): 0.7333 D(G(z)): 0.5405 / 0.4089\n",
"[90/100][105/391] Loss_D: 3.8794 Loss_G: 2.8231 D(x): 0.6424 D(G(z)): 0.5996 / 0.4323\n",
"[90/100][106/391] Loss_D: 2.5655 Loss_G: 3.0822 D(x): 0.7296 D(G(z)): 0.4083 / 0.3850\n",
"[90/100][107/391] Loss_D: 2.9750 Loss_G: 2.8412 D(x): 0.5844 D(G(z)): 0.3547 / 0.4208\n",
"[90/100][108/391] Loss_D: 3.4767 Loss_G: 2.9278 D(x): 0.6346 D(G(z)): 0.5474 / 0.3906\n",
"[90/100][109/391] Loss_D: 2.8173 Loss_G: 2.8697 D(x): 0.6619 D(G(z)): 0.4646 / 0.4218\n",
"[90/100][110/391] Loss_D: 2.9768 Loss_G: 4.0037 D(x): 0.7077 D(G(z)): 0.4817 / 0.3008\n",
"[90/100][111/391] Loss_D: 2.7155 Loss_G: 3.7661 D(x): 0.6759 D(G(z)): 0.4103 / 0.3227\n",
"[90/100][112/391] Loss_D: 3.2940 Loss_G: 3.0964 D(x): 0.5675 D(G(z)): 0.4373 / 0.3912\n",
"[90/100][113/391] Loss_D: 3.3363 Loss_G: 2.7609 D(x): 0.6149 D(G(z)): 0.5131 / 0.4179\n",
"[90/100][114/391] Loss_D: 3.4841 Loss_G: 3.4848 D(x): 0.5513 D(G(z)): 0.4925 / 0.3285\n",
"[90/100][115/391] Loss_D: 3.4322 Loss_G: 2.9282 D(x): 0.5574 D(G(z)): 0.4308 / 0.4048\n",
"[90/100][116/391] Loss_D: 2.8797 Loss_G: 2.6661 D(x): 0.6387 D(G(z)): 0.4087 / 0.4397\n",
"[90/100][117/391] Loss_D: 2.7960 Loss_G: 2.4789 D(x): 0.6651 D(G(z)): 0.3673 / 0.4495\n",
"[90/100][118/391] Loss_D: 2.4142 Loss_G: 3.3499 D(x): 0.7554 D(G(z)): 0.4661 / 0.3558\n",
"[90/100][119/391] Loss_D: 3.0462 Loss_G: 2.7961 D(x): 0.6284 D(G(z)): 0.4475 / 0.4430\n",
"[90/100][120/391] Loss_D: 2.7366 Loss_G: 2.2465 D(x): 0.6963 D(G(z)): 0.4650 / 0.5037\n",
"[90/100][121/391] Loss_D: 3.5179 Loss_G: 3.1537 D(x): 0.6633 D(G(z)): 0.4439 / 0.3859\n",
"[90/100][122/391] Loss_D: 2.4276 Loss_G: 2.4845 D(x): 0.7324 D(G(z)): 0.3718 / 0.4676\n",
"[90/100][123/391] Loss_D: 3.1908 Loss_G: 2.6177 D(x): 0.5801 D(G(z)): 0.4304 / 0.4551\n",
"[90/100][124/391] Loss_D: 3.2658 Loss_G: 2.9905 D(x): 0.6269 D(G(z)): 0.4952 / 0.4039\n",
"[90/100][125/391] Loss_D: 2.5445 Loss_G: 3.3058 D(x): 0.7537 D(G(z)): 0.3977 / 0.3548\n",
"[90/100][126/391] Loss_D: 2.6371 Loss_G: 2.9994 D(x): 0.6304 D(G(z)): 0.3437 / 0.3850\n",
"[90/100][127/391] Loss_D: 2.9260 Loss_G: 2.2432 D(x): 0.6453 D(G(z)): 0.4220 / 0.5072\n",
"[90/100][128/391] Loss_D: 3.2652 Loss_G: 2.5373 D(x): 0.5895 D(G(z)): 0.4636 / 0.4594\n",
"[90/100][129/391] Loss_D: 3.0569 Loss_G: 2.8637 D(x): 0.6024 D(G(z)): 0.4120 / 0.4201\n",
"[90/100][130/391] Loss_D: 3.1767 Loss_G: 2.5133 D(x): 0.6407 D(G(z)): 0.4575 / 0.4617\n",
"[90/100][131/391] Loss_D: 2.7548 Loss_G: 2.7138 D(x): 0.7267 D(G(z)): 0.4376 / 0.4393\n",
"[90/100][132/391] Loss_D: 3.1783 Loss_G: 2.2434 D(x): 0.6473 D(G(z)): 0.4786 / 0.4941\n",
"[90/100][133/391] Loss_D: 3.1976 Loss_G: 3.1056 D(x): 0.6863 D(G(z)): 0.5691 / 0.3884\n",
"[90/100][134/391] Loss_D: 2.4733 Loss_G: 3.1081 D(x): 0.6433 D(G(z)): 0.3620 / 0.3890\n",
"[90/100][135/391] Loss_D: 3.0873 Loss_G: 2.2357 D(x): 0.6616 D(G(z)): 0.4882 / 0.5041\n",
"[90/100][136/391] Loss_D: 3.3958 Loss_G: 1.9503 D(x): 0.5535 D(G(z)): 0.4221 / 0.5481\n",
"[90/100][137/391] Loss_D: 3.1934 Loss_G: 3.1461 D(x): 0.6462 D(G(z)): 0.4964 / 0.3812\n",
"[90/100][138/391] Loss_D: 3.1694 Loss_G: 2.7402 D(x): 0.5521 D(G(z)): 0.3734 / 0.4170\n",
"[90/100][139/391] Loss_D: 3.2806 Loss_G: 2.9774 D(x): 0.6175 D(G(z)): 0.4860 / 0.4049\n",
"[90/100][140/391] Loss_D: 2.8108 Loss_G: 2.2302 D(x): 0.6562 D(G(z)): 0.4360 / 0.4945\n",
"[90/100][141/391] Loss_D: 2.8534 Loss_G: 3.2655 D(x): 0.6645 D(G(z)): 0.3973 / 0.3589\n",
"[90/100][142/391] Loss_D: 3.0177 Loss_G: 2.0780 D(x): 0.6710 D(G(z)): 0.4881 / 0.5233\n",
"[90/100][143/391] Loss_D: 3.1935 Loss_G: 3.0799 D(x): 0.6251 D(G(z)): 0.4596 / 0.3857\n",
"[90/100][144/391] Loss_D: 2.6360 Loss_G: 3.0481 D(x): 0.6842 D(G(z)): 0.4417 / 0.3878\n",
"[90/100][145/391] Loss_D: 3.4228 Loss_G: 2.8061 D(x): 0.6110 D(G(z)): 0.4669 / 0.4187\n",
"[90/100][146/391] Loss_D: 2.7728 Loss_G: 2.9510 D(x): 0.6875 D(G(z)): 0.3840 / 0.3975\n",
"[90/100][147/391] Loss_D: 2.7580 Loss_G: 2.3514 D(x): 0.6795 D(G(z)): 0.3839 / 0.4532\n",
"[90/100][148/391] Loss_D: 1.9308 Loss_G: 2.6082 D(x): 0.7669 D(G(z)): 0.3396 / 0.4336\n",
"[90/100][149/391] Loss_D: 3.2316 Loss_G: 2.9242 D(x): 0.6780 D(G(z)): 0.5341 / 0.4295\n",
"[90/100][150/391] Loss_D: 3.0419 Loss_G: 3.1377 D(x): 0.6784 D(G(z)): 0.4877 / 0.4011\n",
"[90/100][151/391] Loss_D: 3.6672 Loss_G: 2.2989 D(x): 0.6892 D(G(z)): 0.4625 / 0.4939\n",
"[90/100][152/391] Loss_D: 3.4782 Loss_G: 1.8914 D(x): 0.6469 D(G(z)): 0.5641 / 0.5551\n",
"[90/100][153/391] Loss_D: 3.1832 Loss_G: 2.3581 D(x): 0.6184 D(G(z)): 0.4824 / 0.4839\n",
"[90/100][154/391] Loss_D: 2.4024 Loss_G: 2.9730 D(x): 0.6863 D(G(z)): 0.3745 / 0.4130\n",
"[90/100][155/391] Loss_D: 2.9598 Loss_G: 3.2315 D(x): 0.6404 D(G(z)): 0.3497 / 0.3633\n",
"[90/100][156/391] Loss_D: 3.3173 Loss_G: 3.2539 D(x): 0.5526 D(G(z)): 0.4044 / 0.3809\n",
"[90/100][157/391] Loss_D: 2.9973 Loss_G: 2.5043 D(x): 0.6986 D(G(z)): 0.4548 / 0.4565\n",
"[90/100][158/391] Loss_D: 3.1592 Loss_G: 2.4668 D(x): 0.6118 D(G(z)): 0.4792 / 0.4683\n",
"[90/100][159/391] Loss_D: 2.7880 Loss_G: 2.1379 D(x): 0.6752 D(G(z)): 0.3924 / 0.5080\n",
"[90/100][160/391] Loss_D: 2.8688 Loss_G: 3.1214 D(x): 0.7257 D(G(z)): 0.5063 / 0.3990\n",
"[90/100][161/391] Loss_D: 3.1479 Loss_G: 2.1736 D(x): 0.7066 D(G(z)): 0.4900 / 0.5157\n",
"[90/100][162/391] Loss_D: 2.9899 Loss_G: 2.3167 D(x): 0.6647 D(G(z)): 0.4618 / 0.4798\n",
"[90/100][163/391] Loss_D: 3.1590 Loss_G: 2.7954 D(x): 0.6098 D(G(z)): 0.4614 / 0.3991\n",
"[90/100][164/391] Loss_D: 2.6494 Loss_G: 2.0983 D(x): 0.6604 D(G(z)): 0.3917 / 0.5125\n",
"[90/100][165/391] Loss_D: 3.1303 Loss_G: 2.9287 D(x): 0.6574 D(G(z)): 0.4916 / 0.4004\n",
"[90/100][166/391] Loss_D: 3.0168 Loss_G: 2.7078 D(x): 0.6696 D(G(z)): 0.4869 / 0.4384\n",
"[90/100][167/391] Loss_D: 3.0428 Loss_G: 2.5776 D(x): 0.6218 D(G(z)): 0.4099 / 0.4484\n",
"[90/100][168/391] Loss_D: 3.1403 Loss_G: 3.0613 D(x): 0.5993 D(G(z)): 0.4265 / 0.3972\n",
"[90/100][169/391] Loss_D: 2.5396 Loss_G: 2.1279 D(x): 0.6355 D(G(z)): 0.3519 / 0.5256\n",
"[90/100][170/391] Loss_D: 2.9332 Loss_G: 1.9582 D(x): 0.6396 D(G(z)): 0.4165 / 0.5436\n",
"[90/100][171/391] Loss_D: 2.5389 Loss_G: 1.9773 D(x): 0.7687 D(G(z)): 0.3864 / 0.5457\n",
"[90/100][172/391] Loss_D: 3.1926 Loss_G: 2.5896 D(x): 0.6819 D(G(z)): 0.5265 / 0.4452\n",
"[90/100][173/391] Loss_D: 2.9400 Loss_G: 3.0018 D(x): 0.7028 D(G(z)): 0.4810 / 0.3988\n",
"[90/100][174/391] Loss_D: 3.2433 Loss_G: 3.0423 D(x): 0.6459 D(G(z)): 0.5387 / 0.4006\n",
"[90/100][175/391] Loss_D: 2.9256 Loss_G: 2.3004 D(x): 0.5910 D(G(z)): 0.3484 / 0.5094\n",
"[90/100][176/391] Loss_D: 2.9226 Loss_G: 3.1656 D(x): 0.6419 D(G(z)): 0.3817 / 0.3833\n",
"[90/100][177/391] Loss_D: 2.7909 Loss_G: 2.4308 D(x): 0.6837 D(G(z)): 0.3588 / 0.4765\n",
"[90/100][178/391] Loss_D: 2.4494 Loss_G: 2.8770 D(x): 0.7059 D(G(z)): 0.3549 / 0.4013\n",
"[90/100][179/391] Loss_D: 3.1074 Loss_G: 2.6057 D(x): 0.6491 D(G(z)): 0.4517 / 0.4542\n",
"[90/100][180/391] Loss_D: 2.9381 Loss_G: 2.3329 D(x): 0.7215 D(G(z)): 0.4935 / 0.4745\n",
"[90/100][181/391] Loss_D: 3.5602 Loss_G: 2.9367 D(x): 0.6245 D(G(z)): 0.4799 / 0.4102\n",
"[90/100][182/391] Loss_D: 2.5501 Loss_G: 3.1670 D(x): 0.6533 D(G(z)): 0.3577 / 0.3859\n",
"[90/100][183/391] Loss_D: 2.4189 Loss_G: 2.2765 D(x): 0.6458 D(G(z)): 0.3204 / 0.4865\n",
"[90/100][184/391] Loss_D: 3.0637 Loss_G: 2.2141 D(x): 0.7321 D(G(z)): 0.5286 / 0.5001\n",
"[90/100][185/391] Loss_D: 2.7533 Loss_G: 2.7885 D(x): 0.6421 D(G(z)): 0.3737 / 0.4235\n",
"[90/100][186/391] Loss_D: 2.6193 Loss_G: 2.0715 D(x): 0.6671 D(G(z)): 0.3732 / 0.5339\n",
"[90/100][187/391] Loss_D: 2.6199 Loss_G: 2.7128 D(x): 0.6901 D(G(z)): 0.3876 / 0.4336\n",
"[90/100][188/391] Loss_D: 2.5703 Loss_G: 2.8697 D(x): 0.6883 D(G(z)): 0.4469 / 0.4210\n",
"[90/100][189/391] Loss_D: 2.7750 Loss_G: 2.0285 D(x): 0.6462 D(G(z)): 0.3866 / 0.5429\n",
"[90/100][190/391] Loss_D: 2.7078 Loss_G: 2.0582 D(x): 0.6584 D(G(z)): 0.3945 / 0.5517\n",
"[90/100][191/391] Loss_D: 2.8701 Loss_G: 2.4300 D(x): 0.7687 D(G(z)): 0.4712 / 0.4996\n",
"[90/100][192/391] Loss_D: 2.8811 Loss_G: 2.4690 D(x): 0.6110 D(G(z)): 0.4287 / 0.4715\n",
"[90/100][193/391] Loss_D: 2.9294 Loss_G: 3.1203 D(x): 0.6923 D(G(z)): 0.5078 / 0.3959\n",
"[90/100][194/391] Loss_D: 2.6527 Loss_G: 2.5692 D(x): 0.6408 D(G(z)): 0.3446 / 0.4441\n",
"[90/100][195/391] Loss_D: 2.8217 Loss_G: 3.0667 D(x): 0.6573 D(G(z)): 0.3698 / 0.3806\n",
"[90/100][196/391] Loss_D: 2.3757 Loss_G: 2.9096 D(x): 0.7637 D(G(z)): 0.4169 / 0.4021\n",
"[90/100][197/391] Loss_D: 2.9491 Loss_G: 2.1342 D(x): 0.7165 D(G(z)): 0.4590 / 0.5118\n",
"[90/100][198/391] Loss_D: 2.7684 Loss_G: 3.6352 D(x): 0.6318 D(G(z)): 0.2874 / 0.3304\n",
"[90/100][199/391] Loss_D: 3.3961 Loss_G: 2.9683 D(x): 0.5716 D(G(z)): 0.4536 / 0.4145\n",
"[90/100][200/391] Loss_D: 3.4354 Loss_G: 2.7127 D(x): 0.6651 D(G(z)): 0.5765 / 0.4400\n",
"[90/100][201/391] Loss_D: 3.4589 Loss_G: 2.6943 D(x): 0.5944 D(G(z)): 0.4916 / 0.4386\n",
"[90/100][202/391] Loss_D: 2.6784 Loss_G: 2.4092 D(x): 0.7344 D(G(z)): 0.4666 / 0.4859\n",
"[90/100][203/391] Loss_D: 3.0718 Loss_G: 2.9087 D(x): 0.6459 D(G(z)): 0.4950 / 0.4006\n",
"[90/100][204/391] Loss_D: 2.1782 Loss_G: 3.5607 D(x): 0.7807 D(G(z)): 0.4050 / 0.3304\n",
"[90/100][205/391] Loss_D: 2.8227 Loss_G: 2.7285 D(x): 0.6617 D(G(z)): 0.4549 / 0.4411\n",
"[90/100][206/391] Loss_D: 2.7890 Loss_G: 2.3132 D(x): 0.6531 D(G(z)): 0.4322 / 0.4937\n",
"[90/100][207/391] Loss_D: 3.5127 Loss_G: 2.6306 D(x): 0.5492 D(G(z)): 0.4806 / 0.4342\n",
"[90/100][208/391] Loss_D: 3.0024 Loss_G: 2.4976 D(x): 0.5856 D(G(z)): 0.3900 / 0.4593\n",
"[90/100][209/391] Loss_D: 2.7057 Loss_G: 2.6839 D(x): 0.6211 D(G(z)): 0.3131 / 0.4394\n",
"[90/100][210/391] Loss_D: 3.1484 Loss_G: 2.5338 D(x): 0.6456 D(G(z)): 0.4856 / 0.4769\n",
"[90/100][211/391] Loss_D: 3.5746 Loss_G: 2.2706 D(x): 0.6886 D(G(z)): 0.4638 / 0.5109\n",
"[90/100][212/391] Loss_D: 2.6710 Loss_G: 1.9126 D(x): 0.6959 D(G(z)): 0.3584 / 0.5507\n",
"[90/100][213/391] Loss_D: 3.4428 Loss_G: 2.6802 D(x): 0.6722 D(G(z)): 0.5636 / 0.4378\n",
"[90/100][214/391] Loss_D: 2.5881 Loss_G: 3.5118 D(x): 0.7444 D(G(z)): 0.4726 / 0.3335\n",
"[90/100][215/391] Loss_D: 3.3517 Loss_G: 2.4488 D(x): 0.6147 D(G(z)): 0.4892 / 0.4761\n",
"[90/100][216/391] Loss_D: 2.9481 Loss_G: 2.5240 D(x): 0.5553 D(G(z)): 0.3342 / 0.4580\n",
"[90/100][217/391] Loss_D: 2.9955 Loss_G: 2.5183 D(x): 0.6252 D(G(z)): 0.4330 / 0.4552\n",
"[90/100][218/391] Loss_D: 2.4096 Loss_G: 2.3989 D(x): 0.6815 D(G(z)): 0.3197 / 0.4684\n",
"[90/100][219/391] Loss_D: 2.5755 Loss_G: 2.3404 D(x): 0.7304 D(G(z)): 0.4298 / 0.4849\n",
"[90/100][220/391] Loss_D: 3.0267 Loss_G: 2.4898 D(x): 0.5847 D(G(z)): 0.3637 / 0.4678\n",
"[90/100][221/391] Loss_D: 2.6433 Loss_G: 2.1656 D(x): 0.6794 D(G(z)): 0.4089 / 0.5065\n",
"[90/100][222/391] Loss_D: 2.8391 Loss_G: 2.8272 D(x): 0.7315 D(G(z)): 0.4923 / 0.4183\n",
"[90/100][223/391] Loss_D: 3.1355 Loss_G: 1.7700 D(x): 0.5878 D(G(z)): 0.4006 / 0.5648\n",
"[90/100][224/391] Loss_D: 3.0353 Loss_G: 2.0671 D(x): 0.7273 D(G(z)): 0.5450 / 0.5504\n",
"[90/100][225/391] Loss_D: 2.8485 Loss_G: 2.3699 D(x): 0.6853 D(G(z)): 0.4392 / 0.4698\n",
"[90/100][226/391] Loss_D: 2.5753 Loss_G: 2.8505 D(x): 0.7384 D(G(z)): 0.4265 / 0.4287\n",
"[90/100][227/391] Loss_D: 2.8479 Loss_G: 2.4057 D(x): 0.6878 D(G(z)): 0.4555 / 0.4627\n",
"[90/100][228/391] Loss_D: 3.1138 Loss_G: 2.5745 D(x): 0.6722 D(G(z)): 0.5268 / 0.4576\n",
"[90/100][229/391] Loss_D: 2.7634 Loss_G: 2.4925 D(x): 0.6315 D(G(z)): 0.3348 / 0.4698\n",
"[90/100][230/391] Loss_D: 2.5002 Loss_G: 2.5303 D(x): 0.6854 D(G(z)): 0.4041 / 0.4658\n",
"[90/100][231/391] Loss_D: 3.2370 Loss_G: 3.1093 D(x): 0.5876 D(G(z)): 0.4438 / 0.3881\n",
"[90/100][232/391] Loss_D: 2.5884 Loss_G: 2.8076 D(x): 0.7090 D(G(z)): 0.3979 / 0.4224\n",
"[90/100][233/391] Loss_D: 2.6233 Loss_G: 2.6857 D(x): 0.6933 D(G(z)): 0.3989 / 0.4298\n",
"[90/100][234/391] Loss_D: 3.0492 Loss_G: 2.5064 D(x): 0.6611 D(G(z)): 0.4927 / 0.4663\n",
"[90/100][235/391] Loss_D: 2.9299 Loss_G: 2.7958 D(x): 0.6126 D(G(z)): 0.4454 / 0.4189\n",
"[90/100][236/391] Loss_D: 2.8403 Loss_G: 2.9949 D(x): 0.6799 D(G(z)): 0.4306 / 0.3981\n",
"[90/100][237/391] Loss_D: 3.3195 Loss_G: 2.6998 D(x): 0.6965 D(G(z)): 0.5624 / 0.4399\n",
"[90/100][238/391] Loss_D: 3.1423 Loss_G: 2.8428 D(x): 0.5540 D(G(z)): 0.3616 / 0.4186\n",
"[90/100][239/391] Loss_D: 2.7217 Loss_G: 2.4450 D(x): 0.6493 D(G(z)): 0.3748 / 0.4763\n",
"[90/100][240/391] Loss_D: 2.8638 Loss_G: 2.3335 D(x): 0.7044 D(G(z)): 0.4645 / 0.4973\n",
"[90/100][241/391] Loss_D: 3.8954 Loss_G: 2.2817 D(x): 0.7486 D(G(z)): 0.4851 / 0.4913\n",
"[90/100][242/391] Loss_D: 2.6360 Loss_G: 2.7581 D(x): 0.6423 D(G(z)): 0.3464 / 0.4320\n",
"[90/100][243/391] Loss_D: 2.6887 Loss_G: 2.8962 D(x): 0.6968 D(G(z)): 0.4004 / 0.4119\n",
"[90/100][244/391] Loss_D: 2.4492 Loss_G: 2.9214 D(x): 0.7366 D(G(z)): 0.4344 / 0.3901\n",
"[90/100][245/391] Loss_D: 2.9037 Loss_G: 3.3032 D(x): 0.6721 D(G(z)): 0.4914 / 0.3546\n",
"[90/100][246/391] Loss_D: 3.0324 Loss_G: 2.7066 D(x): 0.6185 D(G(z)): 0.4090 / 0.4255\n",
"[90/100][247/391] Loss_D: 3.0569 Loss_G: 3.3029 D(x): 0.6172 D(G(z)): 0.3912 / 0.3739\n",
"[90/100][248/391] Loss_D: 2.9742 Loss_G: 2.4934 D(x): 0.6729 D(G(z)): 0.4876 / 0.4535\n",
"[90/100][249/391] Loss_D: 3.4288 Loss_G: 2.7520 D(x): 0.6789 D(G(z)): 0.5882 / 0.4260\n",
"[90/100][250/391] Loss_D: 2.9526 Loss_G: 2.9073 D(x): 0.6072 D(G(z)): 0.3417 / 0.4200\n",
"[90/100][251/391] Loss_D: 3.0198 Loss_G: 3.2105 D(x): 0.6148 D(G(z)): 0.3915 / 0.3821\n",
"[90/100][252/391] Loss_D: 2.7968 Loss_G: 2.7247 D(x): 0.6919 D(G(z)): 0.4191 / 0.4239\n",
"[90/100][253/391] Loss_D: 2.7439 Loss_G: 2.3677 D(x): 0.7182 D(G(z)): 0.4452 / 0.5001\n",
"[90/100][254/391] Loss_D: 2.5800 Loss_G: 2.9407 D(x): 0.6673 D(G(z)): 0.4046 / 0.4035\n",
"[90/100][255/391] Loss_D: 3.3407 Loss_G: 2.9347 D(x): 0.5439 D(G(z)): 0.4057 / 0.4051\n",
"[90/100][256/391] Loss_D: 3.4876 Loss_G: 2.6562 D(x): 0.5958 D(G(z)): 0.5278 / 0.4234\n",
"[90/100][257/391] Loss_D: 2.8417 Loss_G: 2.8392 D(x): 0.6378 D(G(z)): 0.3578 / 0.4027\n",
"[90/100][258/391] Loss_D: 3.0091 Loss_G: 2.5690 D(x): 0.6900 D(G(z)): 0.5077 / 0.4315\n",
"[90/100][259/391] Loss_D: 3.0600 Loss_G: 2.4470 D(x): 0.6638 D(G(z)): 0.4704 / 0.4754\n",
"[90/100][260/391] Loss_D: 3.0375 Loss_G: 2.7116 D(x): 0.6460 D(G(z)): 0.4745 / 0.4489\n",
"[90/100][261/391] Loss_D: 3.0683 Loss_G: 3.9447 D(x): 0.7117 D(G(z)): 0.4850 / 0.3022\n",
"[90/100][262/391] Loss_D: 2.6317 Loss_G: 2.6385 D(x): 0.6479 D(G(z)): 0.3123 / 0.4483\n",
"[90/100][263/391] Loss_D: 3.2632 Loss_G: 2.9836 D(x): 0.6336 D(G(z)): 0.4373 / 0.3937\n",
"[90/100][264/391] Loss_D: 2.8484 Loss_G: 2.7608 D(x): 0.5852 D(G(z)): 0.3668 / 0.4525\n",
"[90/100][265/391] Loss_D: 3.6416 Loss_G: 2.4171 D(x): 0.6674 D(G(z)): 0.5943 / 0.4870\n",
"[90/100][266/391] Loss_D: 2.4890 Loss_G: 2.4973 D(x): 0.7967 D(G(z)): 0.3727 / 0.4488\n",
"[90/100][267/391] Loss_D: 2.9090 Loss_G: 2.5097 D(x): 0.6298 D(G(z)): 0.4256 / 0.4604\n",
"[90/100][268/391] Loss_D: 2.6058 Loss_G: 3.5088 D(x): 0.6195 D(G(z)): 0.3291 / 0.3467\n",
"[90/100][269/391] Loss_D: 2.8306 Loss_G: 2.9889 D(x): 0.7006 D(G(z)): 0.4301 / 0.3977\n",
"[90/100][270/391] Loss_D: 2.4679 Loss_G: 2.8809 D(x): 0.6967 D(G(z)): 0.3600 / 0.4205\n",
"[90/100][271/391] Loss_D: 3.6612 Loss_G: 1.8978 D(x): 0.6962 D(G(z)): 0.4804 / 0.5718\n",
"[90/100][272/391] Loss_D: 2.4849 Loss_G: 2.6948 D(x): 0.7432 D(G(z)): 0.4019 / 0.4510\n",
"[90/100][273/391] Loss_D: 3.2211 Loss_G: 2.8161 D(x): 0.6218 D(G(z)): 0.4573 / 0.4170\n",
"[90/100][274/391] Loss_D: 3.3338 Loss_G: 2.5638 D(x): 0.6572 D(G(z)): 0.5257 / 0.4457\n",
"[90/100][275/391] Loss_D: 2.6305 Loss_G: 3.3533 D(x): 0.7062 D(G(z)): 0.3870 / 0.3554\n",
"[90/100][276/391] Loss_D: 3.0988 Loss_G: 3.9401 D(x): 0.5578 D(G(z)): 0.3435 / 0.2891\n",
"[90/100][277/391] Loss_D: 3.4115 Loss_G: 2.8648 D(x): 0.6692 D(G(z)): 0.5626 / 0.4097\n",
"[90/100][278/391] Loss_D: 2.5494 Loss_G: 2.5699 D(x): 0.6808 D(G(z)): 0.4055 / 0.4578\n",
"[90/100][279/391] Loss_D: 2.5792 Loss_G: 2.6235 D(x): 0.6669 D(G(z)): 0.3710 / 0.4529\n",
"[90/100][280/391] Loss_D: 2.8637 Loss_G: 2.6707 D(x): 0.6561 D(G(z)): 0.4483 / 0.4401\n",
"[90/100][281/391] Loss_D: 3.1840 Loss_G: 2.6474 D(x): 0.6732 D(G(z)): 0.4949 / 0.4411\n",
"[90/100][282/391] Loss_D: 2.7262 Loss_G: 2.8226 D(x): 0.6663 D(G(z)): 0.3712 / 0.4206\n",
"[90/100][283/391] Loss_D: 3.4560 Loss_G: 3.0517 D(x): 0.6229 D(G(z)): 0.5076 / 0.3948\n",
"[90/100][284/391] Loss_D: 2.5442 Loss_G: 2.0654 D(x): 0.6909 D(G(z)): 0.4238 / 0.5426\n",
"[90/100][285/391] Loss_D: 2.6681 Loss_G: 2.1832 D(x): 0.6858 D(G(z)): 0.3989 / 0.5064\n",
"[90/100][286/391] Loss_D: 3.3351 Loss_G: 3.6046 D(x): 0.5487 D(G(z)): 0.4310 / 0.3264\n",
"[90/100][287/391] Loss_D: 2.7501 Loss_G: 2.7955 D(x): 0.7352 D(G(z)): 0.4187 / 0.4204\n",
"[90/100][288/391] Loss_D: 3.2423 Loss_G: 2.8800 D(x): 0.5648 D(G(z)): 0.4711 / 0.4138\n",
"[90/100][289/391] Loss_D: 3.2985 Loss_G: 2.3192 D(x): 0.5808 D(G(z)): 0.3793 / 0.4933\n",
"[90/100][290/391] Loss_D: 2.7920 Loss_G: 2.4281 D(x): 0.6840 D(G(z)): 0.4363 / 0.4621\n",
"[90/100][291/391] Loss_D: 2.9215 Loss_G: 2.3823 D(x): 0.6858 D(G(z)): 0.4444 / 0.4690\n",
"[90/100][292/391] Loss_D: 2.8497 Loss_G: 2.4268 D(x): 0.7330 D(G(z)): 0.4667 / 0.4684\n",
"[90/100][293/391] Loss_D: 3.1864 Loss_G: 2.0985 D(x): 0.5767 D(G(z)): 0.4311 / 0.5258\n",
"[90/100][294/391] Loss_D: 2.7282 Loss_G: 2.2439 D(x): 0.7156 D(G(z)): 0.4801 / 0.4790\n",
"[90/100][295/391] Loss_D: 2.9574 Loss_G: 2.5321 D(x): 0.5939 D(G(z)): 0.3513 / 0.4663\n",
"[90/100][296/391] Loss_D: 2.7222 Loss_G: 2.2818 D(x): 0.6921 D(G(z)): 0.4260 / 0.4979\n",
"[90/100][297/391] Loss_D: 3.0515 Loss_G: 3.0198 D(x): 0.6842 D(G(z)): 0.4737 / 0.3912\n",
"[90/100][298/391] Loss_D: 2.5233 Loss_G: 3.9450 D(x): 0.7004 D(G(z)): 0.3894 / 0.2997\n",
"[90/100][299/391] Loss_D: 2.6431 Loss_G: 2.8776 D(x): 0.6774 D(G(z)): 0.3749 / 0.4181\n",
"[90/100][300/391] Loss_D: 4.3989 Loss_G: 2.5889 D(x): 0.5993 D(G(z)): 0.6677 / 0.4580\n",
"[90/100][301/391] Loss_D: 3.6254 Loss_G: 3.4297 D(x): 0.6518 D(G(z)): 0.4732 / 0.3596\n",
"[90/100][302/391] Loss_D: 2.5285 Loss_G: 3.0371 D(x): 0.6624 D(G(z)): 0.3370 / 0.3947\n",
"[90/100][303/391] Loss_D: 2.8865 Loss_G: 2.4581 D(x): 0.5722 D(G(z)): 0.3483 / 0.4601\n",
"[90/100][304/391] Loss_D: 2.7477 Loss_G: 2.7149 D(x): 0.6047 D(G(z)): 0.3967 / 0.4143\n",
"[90/100][305/391] Loss_D: 2.9765 Loss_G: 2.7918 D(x): 0.5826 D(G(z)): 0.3867 / 0.4092\n",
"[90/100][306/391] Loss_D: 3.3768 Loss_G: 2.4188 D(x): 0.6823 D(G(z)): 0.5164 / 0.4747\n",
"[90/100][307/391] Loss_D: 3.1111 Loss_G: 2.6557 D(x): 0.6956 D(G(z)): 0.5349 / 0.4178\n",
"[90/100][308/391] Loss_D: 3.5745 Loss_G: 2.5010 D(x): 0.5633 D(G(z)): 0.5247 / 0.4624\n",
"[90/100][309/391] Loss_D: 2.7534 Loss_G: 2.9084 D(x): 0.7150 D(G(z)): 0.4887 / 0.4271\n",
"[90/100][310/391] Loss_D: 2.9992 Loss_G: 2.5280 D(x): 0.7010 D(G(z)): 0.4510 / 0.4803\n",
"[90/100][311/391] Loss_D: 3.0002 Loss_G: 1.9670 D(x): 0.6222 D(G(z)): 0.3860 / 0.5523\n",
"[90/100][312/391] Loss_D: 2.9659 Loss_G: 2.9554 D(x): 0.6079 D(G(z)): 0.3814 / 0.4111\n",
"[90/100][313/391] Loss_D: 3.0764 Loss_G: 2.3218 D(x): 0.6666 D(G(z)): 0.4845 / 0.4887\n",
"[90/100][314/391] Loss_D: 2.7354 Loss_G: 2.7731 D(x): 0.5767 D(G(z)): 0.3419 / 0.4149\n",
"[90/100][315/391] Loss_D: 2.7929 Loss_G: 2.0290 D(x): 0.6552 D(G(z)): 0.4010 / 0.5245\n",
"[90/100][316/391] Loss_D: 2.8379 Loss_G: 2.2278 D(x): 0.6414 D(G(z)): 0.3881 / 0.4938\n",
"[90/100][317/391] Loss_D: 2.9269 Loss_G: 2.6998 D(x): 0.6773 D(G(z)): 0.4525 / 0.4255\n",
"[90/100][318/391] Loss_D: 2.4724 Loss_G: 3.0229 D(x): 0.7090 D(G(z)): 0.3932 / 0.3982\n",
"[90/100][319/391] Loss_D: 3.2308 Loss_G: 3.3664 D(x): 0.7076 D(G(z)): 0.5591 / 0.3567\n",
"[90/100][320/391] Loss_D: 3.3383 Loss_G: 3.2808 D(x): 0.5984 D(G(z)): 0.4683 / 0.3747\n",
"[90/100][321/391] Loss_D: 2.9460 Loss_G: 2.6338 D(x): 0.6548 D(G(z)): 0.4303 / 0.4476\n",
"[90/100][322/391] Loss_D: 2.5368 Loss_G: 2.8568 D(x): 0.7027 D(G(z)): 0.4012 / 0.4226\n",
"[90/100][323/391] Loss_D: 2.8246 Loss_G: 3.2109 D(x): 0.7103 D(G(z)): 0.4297 / 0.3798\n",
"[90/100][324/391] Loss_D: 2.7729 Loss_G: 2.4420 D(x): 0.7537 D(G(z)): 0.5115 / 0.4739\n",
"[90/100][325/391] Loss_D: 3.4117 Loss_G: 3.3154 D(x): 0.5838 D(G(z)): 0.4412 / 0.3692\n",
"[90/100][326/391] Loss_D: 2.7902 Loss_G: 2.8819 D(x): 0.6646 D(G(z)): 0.3910 / 0.4149\n",
"[90/100][327/391] Loss_D: 3.4712 Loss_G: 2.8605 D(x): 0.5938 D(G(z)): 0.5222 / 0.4167\n",
"[90/100][328/391] Loss_D: 2.8549 Loss_G: 2.6038 D(x): 0.6612 D(G(z)): 0.4630 / 0.4517\n",
"[90/100][329/391] Loss_D: 3.5606 Loss_G: 2.9832 D(x): 0.6009 D(G(z)): 0.5438 / 0.4175\n",
"[90/100][330/391] Loss_D: 2.7720 Loss_G: 2.7094 D(x): 0.6791 D(G(z)): 0.4233 / 0.4464\n",
"[90/100][331/391] Loss_D: 3.4625 Loss_G: 2.3978 D(x): 0.6452 D(G(z)): 0.4093 / 0.4577\n",
"[90/100][332/391] Loss_D: 2.2361 Loss_G: 2.9785 D(x): 0.7280 D(G(z)): 0.3295 / 0.3993\n",
"[90/100][333/391] Loss_D: 3.2124 Loss_G: 2.5871 D(x): 0.6206 D(G(z)): 0.4091 / 0.4563\n",
"[90/100][334/391] Loss_D: 2.3019 Loss_G: 2.1724 D(x): 0.6880 D(G(z)): 0.3817 / 0.5082\n",
"[90/100][335/391] Loss_D: 3.3114 Loss_G: 2.1625 D(x): 0.5789 D(G(z)): 0.4658 / 0.4979\n",
"[90/100][336/391] Loss_D: 2.9929 Loss_G: 2.3027 D(x): 0.6203 D(G(z)): 0.3764 / 0.4975\n",
"[90/100][337/391] Loss_D: 3.1442 Loss_G: 2.8445 D(x): 0.6693 D(G(z)): 0.5051 / 0.4027\n",
"[90/100][338/391] Loss_D: 2.8337 Loss_G: 1.6519 D(x): 0.7095 D(G(z)): 0.4787 / 0.5908\n",
"[90/100][339/391] Loss_D: 3.6235 Loss_G: 2.2102 D(x): 0.5975 D(G(z)): 0.5623 / 0.5269\n",
"[90/100][340/391] Loss_D: 3.3202 Loss_G: 2.4979 D(x): 0.5887 D(G(z)): 0.4668 / 0.4695\n",
"[90/100][341/391] Loss_D: 3.1180 Loss_G: 2.5391 D(x): 0.6396 D(G(z)): 0.5049 / 0.4495\n",
"[90/100][342/391] Loss_D: 2.8921 Loss_G: 3.0359 D(x): 0.6576 D(G(z)): 0.4239 / 0.3825\n",
"[90/100][343/391] Loss_D: 3.0710 Loss_G: 3.3645 D(x): 0.7314 D(G(z)): 0.5184 / 0.3464\n",
"[90/100][344/391] Loss_D: 2.7106 Loss_G: 3.3951 D(x): 0.6945 D(G(z)): 0.4412 / 0.3573\n",
"[90/100][345/391] Loss_D: 2.6331 Loss_G: 2.8999 D(x): 0.6607 D(G(z)): 0.3576 / 0.4174\n",
"[90/100][346/391] Loss_D: 3.6355 Loss_G: 2.6456 D(x): 0.5857 D(G(z)): 0.5249 / 0.4409\n",
"[90/100][347/391] Loss_D: 3.5414 Loss_G: 3.3524 D(x): 0.5297 D(G(z)): 0.4677 / 0.3566\n",
"[90/100][348/391] Loss_D: 3.0243 Loss_G: 2.8142 D(x): 0.5723 D(G(z)): 0.3552 / 0.4148\n",
"[90/100][349/391] Loss_D: 2.4325 Loss_G: 2.2616 D(x): 0.7607 D(G(z)): 0.4112 / 0.5043\n",
"[90/100][350/391] Loss_D: 2.6951 Loss_G: 2.1220 D(x): 0.6794 D(G(z)): 0.4311 / 0.5221\n",
"[90/100][351/391] Loss_D: 3.3397 Loss_G: 3.2867 D(x): 0.6267 D(G(z)): 0.4910 / 0.3606\n",
"[90/100][352/391] Loss_D: 2.2835 Loss_G: 2.6048 D(x): 0.7207 D(G(z)): 0.3713 / 0.4430\n",
"[90/100][353/391] Loss_D: 3.2335 Loss_G: 3.9174 D(x): 0.5670 D(G(z)): 0.4307 / 0.2903\n",
"[90/100][354/391] Loss_D: 3.1344 Loss_G: 2.8021 D(x): 0.6547 D(G(z)): 0.5115 / 0.4267\n",
"[90/100][355/391] Loss_D: 2.7104 Loss_G: 2.6365 D(x): 0.6276 D(G(z)): 0.3739 / 0.4285\n",
"[90/100][356/391] Loss_D: 2.7201 Loss_G: 2.8094 D(x): 0.6456 D(G(z)): 0.3813 / 0.4022\n",
"[90/100][357/391] Loss_D: 2.9741 Loss_G: 2.2021 D(x): 0.6989 D(G(z)): 0.5133 / 0.5066\n",
"[90/100][358/391] Loss_D: 2.4214 Loss_G: 2.5639 D(x): 0.6788 D(G(z)): 0.3720 / 0.4573\n",
"[90/100][359/391] Loss_D: 2.8338 Loss_G: 2.1624 D(x): 0.5982 D(G(z)): 0.3607 / 0.5251\n",
"[90/100][360/391] Loss_D: 2.5022 Loss_G: 2.2053 D(x): 0.6972 D(G(z)): 0.3610 / 0.5013\n",
"[90/100][361/391] Loss_D: 3.4774 Loss_G: 2.5481 D(x): 0.5955 D(G(z)): 0.4272 / 0.4546\n",
"[90/100][362/391] Loss_D: 2.9137 Loss_G: 2.7533 D(x): 0.6994 D(G(z)): 0.4621 / 0.4364\n",
"[90/100][363/391] Loss_D: 3.0588 Loss_G: 2.7169 D(x): 0.6586 D(G(z)): 0.4723 / 0.4315\n",
"[90/100][364/391] Loss_D: 2.7650 Loss_G: 3.2755 D(x): 0.7024 D(G(z)): 0.4392 / 0.3617\n",
"[90/100][365/391] Loss_D: 3.0337 Loss_G: 3.3670 D(x): 0.5747 D(G(z)): 0.3368 / 0.3520\n",
"[90/100][366/391] Loss_D: 3.0362 Loss_G: 2.7823 D(x): 0.6891 D(G(z)): 0.5355 / 0.4295\n",
"[90/100][367/391] Loss_D: 3.1487 Loss_G: 2.5502 D(x): 0.6782 D(G(z)): 0.4867 / 0.4482\n",
"[90/100][368/391] Loss_D: 3.0065 Loss_G: 2.8455 D(x): 0.6529 D(G(z)): 0.4838 / 0.4229\n",
"[90/100][369/391] Loss_D: 2.6201 Loss_G: 2.4648 D(x): 0.6479 D(G(z)): 0.3515 / 0.4905\n",
"[90/100][370/391] Loss_D: 3.0361 Loss_G: 2.3333 D(x): 0.6508 D(G(z)): 0.4964 / 0.4780\n",
"[90/100][371/391] Loss_D: 3.3802 Loss_G: 3.8949 D(x): 0.6652 D(G(z)): 0.5369 / 0.3086\n",
"[90/100][372/391] Loss_D: 3.2670 Loss_G: 2.1971 D(x): 0.6334 D(G(z)): 0.5305 / 0.5248\n",
"[90/100][373/391] Loss_D: 3.0820 Loss_G: 3.0510 D(x): 0.6972 D(G(z)): 0.5119 / 0.3979\n",
"[90/100][374/391] Loss_D: 2.6511 Loss_G: 2.4370 D(x): 0.6560 D(G(z)): 0.3566 / 0.4647\n",
"[90/100][375/391] Loss_D: 2.9288 Loss_G: 2.4408 D(x): 0.6568 D(G(z)): 0.4363 / 0.4565\n",
"[90/100][376/391] Loss_D: 3.2342 Loss_G: 2.4215 D(x): 0.6075 D(G(z)): 0.4482 / 0.4789\n",
"[90/100][377/391] Loss_D: 3.1815 Loss_G: 3.2851 D(x): 0.5860 D(G(z)): 0.3865 / 0.3607\n",
"[90/100][378/391] Loss_D: 2.9271 Loss_G: 1.7563 D(x): 0.6331 D(G(z)): 0.4529 / 0.5698\n",
"[90/100][379/391] Loss_D: 2.4339 Loss_G: 3.0468 D(x): 0.7166 D(G(z)): 0.3908 / 0.3884\n",
"[90/100][380/391] Loss_D: 2.4904 Loss_G: 2.4211 D(x): 0.6983 D(G(z)): 0.3855 / 0.4838\n",
"[90/100][381/391] Loss_D: 3.0288 Loss_G: 1.9002 D(x): 0.5834 D(G(z)): 0.3791 / 0.5462\n",
"[90/100][382/391] Loss_D: 2.9926 Loss_G: 2.4904 D(x): 0.5884 D(G(z)): 0.4111 / 0.4709\n",
"[90/100][383/391] Loss_D: 2.7515 Loss_G: 2.3693 D(x): 0.6580 D(G(z)): 0.4042 / 0.4789\n",
"[90/100][384/391] Loss_D: 2.5553 Loss_G: 2.7479 D(x): 0.6835 D(G(z)): 0.4443 / 0.4197\n",
"[90/100][385/391] Loss_D: 2.5015 Loss_G: 2.1462 D(x): 0.7389 D(G(z)): 0.4114 / 0.5123\n",
"[90/100][386/391] Loss_D: 2.8419 Loss_G: 3.2069 D(x): 0.6656 D(G(z)): 0.4113 / 0.3672\n",
"[90/100][387/391] Loss_D: 2.6967 Loss_G: 2.8697 D(x): 0.7532 D(G(z)): 0.4394 / 0.4127\n",
"[90/100][388/391] Loss_D: 2.9956 Loss_G: 3.2018 D(x): 0.6277 D(G(z)): 0.5044 / 0.3939\n",
"[90/100][389/391] Loss_D: 2.7148 Loss_G: 2.4971 D(x): 0.6732 D(G(z)): 0.3972 / 0.4712\n",
"[90/100][390/391] Loss_D: 2.4985 Loss_G: 2.0273 D(x): 0.7141 D(G(z)): 0.4217 / 0.5408\n",
"[90/100][391/391] Loss_D: 3.8347 Loss_G: 2.2114 D(x): 0.6409 D(G(z)): 0.5341 / 0.4991\n",
"[91/100][1/391] Loss_D: 3.6940 Loss_G: 2.8243 D(x): 0.6774 D(G(z)): 0.5420 / 0.4345\n",
"[91/100][2/391] Loss_D: 2.6889 Loss_G: 3.7649 D(x): 0.6547 D(G(z)): 0.3825 / 0.2999\n",
"[91/100][3/391] Loss_D: 3.0617 Loss_G: 3.2915 D(x): 0.6104 D(G(z)): 0.4373 / 0.3684\n",
"[91/100][4/391] Loss_D: 2.5592 Loss_G: 2.6386 D(x): 0.6399 D(G(z)): 0.3503 / 0.4295\n",
"[91/100][5/391] Loss_D: 3.2485 Loss_G: 2.7360 D(x): 0.5982 D(G(z)): 0.4837 / 0.4301\n",
"[91/100][6/391] Loss_D: 3.1161 Loss_G: 4.0537 D(x): 0.6636 D(G(z)): 0.4730 / 0.2767\n",
"[91/100][7/391] Loss_D: 3.1854 Loss_G: 2.6816 D(x): 0.6942 D(G(z)): 0.4764 / 0.4339\n",
"[91/100][8/391] Loss_D: 3.3424 Loss_G: 2.9774 D(x): 0.6273 D(G(z)): 0.5772 / 0.4056\n",
"[91/100][9/391] Loss_D: 2.7434 Loss_G: 3.0323 D(x): 0.6341 D(G(z)): 0.3645 / 0.3997\n",
"[91/100][10/391] Loss_D: 2.9414 Loss_G: 2.5554 D(x): 0.6320 D(G(z)): 0.4383 / 0.4652\n",
"[91/100][11/391] Loss_D: 2.7466 Loss_G: 2.9838 D(x): 0.6429 D(G(z)): 0.3726 / 0.4086\n",
"[91/100][12/391] Loss_D: 2.5140 Loss_G: 2.9773 D(x): 0.7227 D(G(z)): 0.3710 / 0.3991\n",
"[91/100][13/391] Loss_D: 3.1430 Loss_G: 3.2989 D(x): 0.6520 D(G(z)): 0.5196 / 0.3652\n",
"[91/100][14/391] Loss_D: 2.8104 Loss_G: 3.3645 D(x): 0.5952 D(G(z)): 0.3812 / 0.3417\n",
"[91/100][15/391] Loss_D: 2.9505 Loss_G: 2.9309 D(x): 0.6858 D(G(z)): 0.5083 / 0.3991\n",
"[91/100][16/391] Loss_D: 2.9474 Loss_G: 2.4304 D(x): 0.5586 D(G(z)): 0.2791 / 0.4729\n",
"[91/100][17/391] Loss_D: 3.1021 Loss_G: 2.8561 D(x): 0.6429 D(G(z)): 0.4975 / 0.4106\n",
"[91/100][18/391] Loss_D: 2.8816 Loss_G: 2.8873 D(x): 0.7026 D(G(z)): 0.5056 / 0.4040\n",
"[91/100][19/391] Loss_D: 2.6538 Loss_G: 2.3391 D(x): 0.6958 D(G(z)): 0.4421 / 0.4961\n",
"[91/100][20/391] Loss_D: 3.1308 Loss_G: 1.7770 D(x): 0.6285 D(G(z)): 0.4764 / 0.5814\n",
"[91/100][21/391] Loss_D: 2.6299 Loss_G: 4.1617 D(x): 0.7181 D(G(z)): 0.4825 / 0.2773\n",
"[91/100][22/391] Loss_D: 2.9232 Loss_G: 2.8341 D(x): 0.6262 D(G(z)): 0.4374 / 0.4199\n",
"[91/100][23/391] Loss_D: 4.1279 Loss_G: 2.3064 D(x): 0.4219 D(G(z)): 0.4483 / 0.4879\n",
"[91/100][24/391] Loss_D: 3.1774 Loss_G: 2.6034 D(x): 0.6201 D(G(z)): 0.4488 / 0.4456\n",
"[91/100][25/391] Loss_D: 3.5021 Loss_G: 3.0596 D(x): 0.7574 D(G(z)): 0.6187 / 0.3913\n",
"[91/100][26/391] Loss_D: 3.7391 Loss_G: 3.6795 D(x): 0.5377 D(G(z)): 0.5020 / 0.3138\n",
"[91/100][27/391] Loss_D: 3.0647 Loss_G: 2.6677 D(x): 0.6151 D(G(z)): 0.3727 / 0.4257\n",
"[91/100][28/391] Loss_D: 2.8024 Loss_G: 1.9909 D(x): 0.6295 D(G(z)): 0.4152 / 0.5401\n",
"[91/100][29/391] Loss_D: 2.9116 Loss_G: 2.4462 D(x): 0.6381 D(G(z)): 0.3937 / 0.4608\n",
"[91/100][30/391] Loss_D: 3.1819 Loss_G: 2.8323 D(x): 0.6483 D(G(z)): 0.5288 / 0.4097\n",
"[91/100][31/391] Loss_D: 3.5289 Loss_G: 2.4930 D(x): 0.6881 D(G(z)): 0.4301 / 0.4607\n",
"[91/100][32/391] Loss_D: 3.1333 Loss_G: 2.8323 D(x): 0.6097 D(G(z)): 0.4302 / 0.4266\n",
"[91/100][33/391] Loss_D: 2.9622 Loss_G: 1.9511 D(x): 0.6270 D(G(z)): 0.4171 / 0.5583\n",
"[91/100][34/391] Loss_D: 2.7235 Loss_G: 3.1442 D(x): 0.6775 D(G(z)): 0.4545 / 0.3788\n",
"[91/100][35/391] Loss_D: 2.7501 Loss_G: 2.6188 D(x): 0.6538 D(G(z)): 0.4070 / 0.4293\n",
"[91/100][36/391] Loss_D: 3.4447 Loss_G: 2.5213 D(x): 0.5901 D(G(z)): 0.4863 / 0.4584\n",
"[91/100][37/391] Loss_D: 2.8622 Loss_G: 2.8486 D(x): 0.6556 D(G(z)): 0.3990 / 0.4257\n",
"[91/100][38/391] Loss_D: 2.3403 Loss_G: 2.6249 D(x): 0.7582 D(G(z)): 0.4297 / 0.4582\n",
"[91/100][39/391] Loss_D: 3.0293 Loss_G: 2.6751 D(x): 0.6247 D(G(z)): 0.3885 / 0.4496\n",
"[91/100][40/391] Loss_D: 3.1470 Loss_G: 2.2939 D(x): 0.7115 D(G(z)): 0.5089 / 0.4896\n",
"[91/100][41/391] Loss_D: 3.0733 Loss_G: 3.1601 D(x): 0.6397 D(G(z)): 0.4264 / 0.3812\n",
"[91/100][42/391] Loss_D: 2.6607 Loss_G: 2.1071 D(x): 0.7377 D(G(z)): 0.4343 / 0.5083\n",
"[91/100][43/391] Loss_D: 3.2092 Loss_G: 2.5830 D(x): 0.5985 D(G(z)): 0.4337 / 0.4504\n",
"[91/100][44/391] Loss_D: 2.6551 Loss_G: 2.4223 D(x): 0.6954 D(G(z)): 0.4407 / 0.4770\n",
"[91/100][45/391] Loss_D: 3.2236 Loss_G: 3.2427 D(x): 0.6453 D(G(z)): 0.4638 / 0.3617\n",
"[91/100][46/391] Loss_D: 2.9536 Loss_G: 3.5989 D(x): 0.7025 D(G(z)): 0.4792 / 0.3314\n",
"[91/100][47/391] Loss_D: 2.9616 Loss_G: 3.5478 D(x): 0.5926 D(G(z)): 0.4047 / 0.3213\n",
"[91/100][48/391] Loss_D: 2.5223 Loss_G: 2.9300 D(x): 0.6567 D(G(z)): 0.3141 / 0.4216\n",
"[91/100][49/391] Loss_D: 2.8973 Loss_G: 2.8467 D(x): 0.6155 D(G(z)): 0.4193 / 0.4169\n",
"[91/100][50/391] Loss_D: 3.0340 Loss_G: 2.3866 D(x): 0.6463 D(G(z)): 0.4928 / 0.4724\n",
"[91/100][51/391] Loss_D: 3.3031 Loss_G: 2.8133 D(x): 0.6040 D(G(z)): 0.4793 / 0.4173\n",
"[91/100][52/391] Loss_D: 2.7960 Loss_G: 2.7739 D(x): 0.6095 D(G(z)): 0.3755 / 0.4404\n",
"[91/100][53/391] Loss_D: 2.8625 Loss_G: 2.2727 D(x): 0.6209 D(G(z)): 0.3853 / 0.5018\n",
"[91/100][54/391] Loss_D: 3.9236 Loss_G: 2.2791 D(x): 0.5560 D(G(z)): 0.5391 / 0.4835\n",
"[91/100][55/391] Loss_D: 3.2688 Loss_G: 2.5596 D(x): 0.6262 D(G(z)): 0.5121 / 0.4522\n",
"[91/100][56/391] Loss_D: 3.4363 Loss_G: 2.5227 D(x): 0.5801 D(G(z)): 0.4694 / 0.4449\n",
"[91/100][57/391] Loss_D: 2.8937 Loss_G: 3.2527 D(x): 0.7354 D(G(z)): 0.4842 / 0.3444\n",
"[91/100][58/391] Loss_D: 2.4175 Loss_G: 3.1411 D(x): 0.7035 D(G(z)): 0.3581 / 0.3723\n",
"[91/100][59/391] Loss_D: 2.8805 Loss_G: 3.0802 D(x): 0.7494 D(G(z)): 0.5137 / 0.3826\n",
"[91/100][60/391] Loss_D: 3.0788 Loss_G: 3.3928 D(x): 0.6220 D(G(z)): 0.4613 / 0.3559\n",
"[91/100][61/391] Loss_D: 3.5746 Loss_G: 2.3180 D(x): 0.6088 D(G(z)): 0.3767 / 0.4995\n",
"[91/100][62/391] Loss_D: 3.2837 Loss_G: 3.1312 D(x): 0.6209 D(G(z)): 0.4833 / 0.3884\n",
"[91/100][63/391] Loss_D: 3.2222 Loss_G: 2.9853 D(x): 0.5400 D(G(z)): 0.3785 / 0.3968\n",
"[91/100][64/391] Loss_D: 3.2646 Loss_G: 2.4992 D(x): 0.5920 D(G(z)): 0.4762 / 0.4612\n",
"[91/100][65/391] Loss_D: 2.8780 Loss_G: 2.6837 D(x): 0.7016 D(G(z)): 0.4816 / 0.4224\n",
"[91/100][66/391] Loss_D: 2.8865 Loss_G: 2.9227 D(x): 0.6612 D(G(z)): 0.4264 / 0.4143\n",
"[91/100][67/391] Loss_D: 3.2939 Loss_G: 2.9510 D(x): 0.6265 D(G(z)): 0.4716 / 0.4024\n",
"[91/100][68/391] Loss_D: 2.9539 Loss_G: 2.1204 D(x): 0.6507 D(G(z)): 0.4972 / 0.5056\n",
"[91/100][69/391] Loss_D: 3.1007 Loss_G: 2.5011 D(x): 0.5948 D(G(z)): 0.3634 / 0.4625\n",
"[91/100][70/391] Loss_D: 3.1448 Loss_G: 3.3029 D(x): 0.6319 D(G(z)): 0.4785 / 0.3757\n",
"[91/100][71/391] Loss_D: 3.1593 Loss_G: 2.4471 D(x): 0.6798 D(G(z)): 0.4792 / 0.4677\n",
"[91/100][72/391] Loss_D: 2.4906 Loss_G: 2.1387 D(x): 0.7115 D(G(z)): 0.4068 / 0.5228\n",
"[91/100][73/391] Loss_D: 3.2631 Loss_G: 2.6519 D(x): 0.6131 D(G(z)): 0.4971 / 0.4389\n",
"[91/100][74/391] Loss_D: 2.1683 Loss_G: 2.7926 D(x): 0.7820 D(G(z)): 0.4047 / 0.4187\n",
"[91/100][75/391] Loss_D: 3.5432 Loss_G: 2.8385 D(x): 0.6288 D(G(z)): 0.5567 / 0.4158\n",
"[91/100][76/391] Loss_D: 2.5999 Loss_G: 3.6891 D(x): 0.6685 D(G(z)): 0.3684 / 0.3139\n",
"[91/100][77/391] Loss_D: 3.7416 Loss_G: 3.1184 D(x): 0.5486 D(G(z)): 0.5129 / 0.3836\n",
"[91/100][78/391] Loss_D: 3.2827 Loss_G: 3.0356 D(x): 0.5171 D(G(z)): 0.3123 / 0.3845\n",
"[91/100][79/391] Loss_D: 2.9244 Loss_G: 2.3373 D(x): 0.6443 D(G(z)): 0.4570 / 0.4902\n",
"[91/100][80/391] Loss_D: 3.1251 Loss_G: 3.0810 D(x): 0.6670 D(G(z)): 0.4593 / 0.3868\n",
"[91/100][81/391] Loss_D: 3.4783 Loss_G: 3.0660 D(x): 0.6499 D(G(z)): 0.5800 / 0.3893\n",
"[91/100][82/391] Loss_D: 3.2016 Loss_G: 3.3932 D(x): 0.6945 D(G(z)): 0.5089 / 0.3453\n",
"[91/100][83/391] Loss_D: 2.7149 Loss_G: 3.5010 D(x): 0.7134 D(G(z)): 0.4433 / 0.3434\n",
"[91/100][84/391] Loss_D: 3.2037 Loss_G: 3.3199 D(x): 0.5904 D(G(z)): 0.4696 / 0.3709\n",
"[91/100][85/391] Loss_D: 3.8415 Loss_G: 3.6756 D(x): 0.5769 D(G(z)): 0.5698 / 0.3344\n",
"[91/100][86/391] Loss_D: 3.6617 Loss_G: 2.6366 D(x): 0.5123 D(G(z)): 0.4518 / 0.4382\n",
"[91/100][87/391] Loss_D: 3.2586 Loss_G: 3.1238 D(x): 0.6464 D(G(z)): 0.4691 / 0.3913\n",
"[91/100][88/391] Loss_D: 2.8453 Loss_G: 2.5527 D(x): 0.6519 D(G(z)): 0.4653 / 0.4554\n",
"[91/100][89/391] Loss_D: 2.9261 Loss_G: 3.1193 D(x): 0.6744 D(G(z)): 0.4707 / 0.3904\n",
"[91/100][90/391] Loss_D: 2.9178 Loss_G: 2.6657 D(x): 0.6281 D(G(z)): 0.4202 / 0.4639\n",
"[91/100][91/391] Loss_D: 3.6179 Loss_G: 2.1981 D(x): 0.6242 D(G(z)): 0.3158 / 0.4916\n",
"[91/100][92/391] Loss_D: 2.7473 Loss_G: 2.9821 D(x): 0.6389 D(G(z)): 0.3939 / 0.3909\n",
"[91/100][93/391] Loss_D: 2.7378 Loss_G: 2.5956 D(x): 0.7172 D(G(z)): 0.4487 / 0.4607\n",
"[91/100][94/391] Loss_D: 3.2717 Loss_G: 3.5932 D(x): 0.6673 D(G(z)): 0.5523 / 0.3381\n",
"[91/100][95/391] Loss_D: 2.7888 Loss_G: 2.5215 D(x): 0.6679 D(G(z)): 0.3799 / 0.4585\n",
"[91/100][96/391] Loss_D: 2.9164 Loss_G: 2.8413 D(x): 0.6008 D(G(z)): 0.3818 / 0.4117\n",
"[91/100][97/391] Loss_D: 3.3056 Loss_G: 3.8964 D(x): 0.6686 D(G(z)): 0.5270 / 0.3060\n",
"[91/100][98/391] Loss_D: 2.5936 Loss_G: 2.7536 D(x): 0.7025 D(G(z)): 0.4121 / 0.4106\n",
"[91/100][99/391] Loss_D: 3.0680 Loss_G: 2.5856 D(x): 0.6469 D(G(z)): 0.4440 / 0.4533\n",
"[91/100][100/391] Loss_D: 2.9293 Loss_G: 2.7301 D(x): 0.6697 D(G(z)): 0.4871 / 0.4406\n",
"[91/100][101/391] Loss_D: 2.7541 Loss_G: 3.3119 D(x): 0.6802 D(G(z)): 0.4240 / 0.3660\n",
"[91/100][102/391] Loss_D: 2.6791 Loss_G: 2.2216 D(x): 0.6464 D(G(z)): 0.3209 / 0.5090\n",
"[91/100][103/391] Loss_D: 2.7692 Loss_G: 2.9662 D(x): 0.7191 D(G(z)): 0.4435 / 0.3976\n",
"[91/100][104/391] Loss_D: 3.0345 Loss_G: 3.4657 D(x): 0.6390 D(G(z)): 0.4791 / 0.3406\n",
"[91/100][105/391] Loss_D: 3.3359 Loss_G: 2.8421 D(x): 0.5836 D(G(z)): 0.4531 / 0.4099\n",
"[91/100][106/391] Loss_D: 3.1841 Loss_G: 2.7472 D(x): 0.6057 D(G(z)): 0.4834 / 0.4166\n",
"[91/100][107/391] Loss_D: 2.9141 Loss_G: 3.0144 D(x): 0.6747 D(G(z)): 0.4119 / 0.3810\n",
"[91/100][108/391] Loss_D: 2.5996 Loss_G: 3.3192 D(x): 0.6842 D(G(z)): 0.3571 / 0.3602\n",
"[91/100][109/391] Loss_D: 3.2235 Loss_G: 3.1239 D(x): 0.6450 D(G(z)): 0.5259 / 0.3846\n",
"[91/100][110/391] Loss_D: 2.8342 Loss_G: 1.8096 D(x): 0.6645 D(G(z)): 0.4052 / 0.5810\n",
"[91/100][111/391] Loss_D: 2.3316 Loss_G: 2.2853 D(x): 0.7438 D(G(z)): 0.3712 / 0.5027\n",
"[91/100][112/391] Loss_D: 2.8594 Loss_G: 2.4354 D(x): 0.6360 D(G(z)): 0.3973 / 0.4643\n",
"[91/100][113/391] Loss_D: 2.7292 Loss_G: 2.1814 D(x): 0.6987 D(G(z)): 0.4315 / 0.5211\n",
"[91/100][114/391] Loss_D: 3.2784 Loss_G: 2.7568 D(x): 0.6391 D(G(z)): 0.5205 / 0.4259\n",
"[91/100][115/391] Loss_D: 3.2873 Loss_G: 2.4506 D(x): 0.6373 D(G(z)): 0.4815 / 0.4628\n",
"[91/100][116/391] Loss_D: 3.0498 Loss_G: 2.6177 D(x): 0.5787 D(G(z)): 0.3899 / 0.4507\n",
"[91/100][117/391] Loss_D: 3.9305 Loss_G: 3.1581 D(x): 0.5685 D(G(z)): 0.5605 / 0.3740\n",
"[91/100][118/391] Loss_D: 2.6859 Loss_G: 3.2839 D(x): 0.6731 D(G(z)): 0.4350 / 0.3700\n",
"[91/100][119/391] Loss_D: 2.8502 Loss_G: 2.3256 D(x): 0.6576 D(G(z)): 0.4163 / 0.4772\n",
"[91/100][120/391] Loss_D: 2.7651 Loss_G: 3.1928 D(x): 0.7246 D(G(z)): 0.4727 / 0.4003\n",
"[91/100][121/391] Loss_D: 3.7518 Loss_G: 1.9394 D(x): 0.7927 D(G(z)): 0.3691 / 0.5407\n",
"[91/100][122/391] Loss_D: 2.9495 Loss_G: 2.9255 D(x): 0.6445 D(G(z)): 0.4166 / 0.4135\n",
"[91/100][123/391] Loss_D: 2.7870 Loss_G: 3.1196 D(x): 0.6379 D(G(z)): 0.3254 / 0.3882\n",
"[91/100][124/391] Loss_D: 3.1334 Loss_G: 2.7380 D(x): 0.6324 D(G(z)): 0.4661 / 0.4317\n",
"[91/100][125/391] Loss_D: 3.1865 Loss_G: 2.2916 D(x): 0.6799 D(G(z)): 0.4654 / 0.4838\n",
"[91/100][126/391] Loss_D: 2.8195 Loss_G: 2.5501 D(x): 0.6164 D(G(z)): 0.3719 / 0.4438\n",
"[91/100][127/391] Loss_D: 2.9947 Loss_G: 2.5582 D(x): 0.6293 D(G(z)): 0.3863 / 0.4399\n",
"[91/100][128/391] Loss_D: 2.3163 Loss_G: 3.1494 D(x): 0.7487 D(G(z)): 0.4235 / 0.3798\n",
"[91/100][129/391] Loss_D: 3.0369 Loss_G: 2.3065 D(x): 0.5931 D(G(z)): 0.4211 / 0.4802\n",
"[91/100][130/391] Loss_D: 3.0170 Loss_G: 2.9277 D(x): 0.6793 D(G(z)): 0.4412 / 0.4066\n",
"[91/100][131/391] Loss_D: 3.0422 Loss_G: 2.9292 D(x): 0.6623 D(G(z)): 0.4690 / 0.4128\n",
"[91/100][132/391] Loss_D: 2.7921 Loss_G: 2.8829 D(x): 0.7239 D(G(z)): 0.4681 / 0.4174\n",
"[91/100][133/391] Loss_D: 2.3218 Loss_G: 3.3661 D(x): 0.7666 D(G(z)): 0.4338 / 0.3552\n",
"[91/100][134/391] Loss_D: 2.8553 Loss_G: 2.8881 D(x): 0.6249 D(G(z)): 0.4264 / 0.4144\n",
"[91/100][135/391] Loss_D: 3.0965 Loss_G: 2.6366 D(x): 0.6345 D(G(z)): 0.4800 / 0.4581\n",
"[91/100][136/391] Loss_D: 2.7721 Loss_G: 3.8402 D(x): 0.6090 D(G(z)): 0.3357 / 0.3137\n",
"[91/100][137/391] Loss_D: 3.0415 Loss_G: 2.8926 D(x): 0.5753 D(G(z)): 0.3877 / 0.4030\n",
"[91/100][138/391] Loss_D: 3.0892 Loss_G: 3.0972 D(x): 0.6709 D(G(z)): 0.5596 / 0.3846\n",
"[91/100][139/391] Loss_D: 2.7852 Loss_G: 3.2730 D(x): 0.6877 D(G(z)): 0.4123 / 0.3674\n",
"[91/100][140/391] Loss_D: 2.8661 Loss_G: 2.8303 D(x): 0.6557 D(G(z)): 0.4473 / 0.4332\n",
"[91/100][141/391] Loss_D: 3.3055 Loss_G: 2.7118 D(x): 0.6037 D(G(z)): 0.4503 / 0.4334\n",
"[91/100][142/391] Loss_D: 2.4650 Loss_G: 2.7758 D(x): 0.6661 D(G(z)): 0.3608 / 0.4408\n",
"[91/100][143/391] Loss_D: 2.7611 Loss_G: 2.6941 D(x): 0.7006 D(G(z)): 0.4148 / 0.4299\n",
"[91/100][144/391] Loss_D: 2.8741 Loss_G: 3.0089 D(x): 0.6835 D(G(z)): 0.4869 / 0.3942\n",
"[91/100][145/391] Loss_D: 3.4320 Loss_G: 2.9143 D(x): 0.6716 D(G(z)): 0.5323 / 0.4138\n",
"[91/100][146/391] Loss_D: 3.2679 Loss_G: 3.8064 D(x): 0.5748 D(G(z)): 0.4381 / 0.3031\n",
"[91/100][147/391] Loss_D: 2.9380 Loss_G: 2.7409 D(x): 0.6561 D(G(z)): 0.4067 / 0.4235\n",
"[91/100][148/391] Loss_D: 2.9849 Loss_G: 2.7181 D(x): 0.6033 D(G(z)): 0.4289 / 0.4338\n",
"[91/100][149/391] Loss_D: 2.8429 Loss_G: 2.7703 D(x): 0.6821 D(G(z)): 0.4531 / 0.4271\n",
"[91/100][150/391] Loss_D: 2.8524 Loss_G: 2.1654 D(x): 0.7058 D(G(z)): 0.4570 / 0.5073\n",
"[91/100][151/391] Loss_D: 3.8732 Loss_G: 2.1925 D(x): 0.6386 D(G(z)): 0.5436 / 0.5061\n",
"[91/100][152/391] Loss_D: 2.5037 Loss_G: 2.7747 D(x): 0.6340 D(G(z)): 0.3711 / 0.4327\n",
"[91/100][153/391] Loss_D: 3.1275 Loss_G: 2.6890 D(x): 0.6230 D(G(z)): 0.4744 / 0.4327\n",
"[91/100][154/391] Loss_D: 3.0834 Loss_G: 3.6440 D(x): 0.6623 D(G(z)): 0.5176 / 0.3293\n",
"[91/100][155/391] Loss_D: 3.2098 Loss_G: 3.6890 D(x): 0.6110 D(G(z)): 0.4193 / 0.3189\n",
"[91/100][156/391] Loss_D: 2.8498 Loss_G: 1.7794 D(x): 0.6306 D(G(z)): 0.3837 / 0.5787\n",
"[91/100][157/391] Loss_D: 2.6134 Loss_G: 2.7260 D(x): 0.7305 D(G(z)): 0.3418 / 0.4282\n",
"[91/100][158/391] Loss_D: 2.5549 Loss_G: 2.1052 D(x): 0.7120 D(G(z)): 0.4384 / 0.5433\n",
"[91/100][159/391] Loss_D: 2.9473 Loss_G: 2.7957 D(x): 0.6641 D(G(z)): 0.4601 / 0.4289\n",
"[91/100][160/391] Loss_D: 3.4334 Loss_G: 2.9407 D(x): 0.5443 D(G(z)): 0.4386 / 0.4078\n",
"[91/100][161/391] Loss_D: 3.0042 Loss_G: 2.2921 D(x): 0.6336 D(G(z)): 0.4526 / 0.4810\n",
"[91/100][162/391] Loss_D: 2.7896 Loss_G: 2.5464 D(x): 0.7140 D(G(z)): 0.4660 / 0.4583\n",
"[91/100][163/391] Loss_D: 2.9613 Loss_G: 3.5929 D(x): 0.6761 D(G(z)): 0.4778 / 0.3437\n",
"[91/100][164/391] Loss_D: 2.5102 Loss_G: 2.8287 D(x): 0.7214 D(G(z)): 0.4398 / 0.4237\n",
"[91/100][165/391] Loss_D: 2.7985 Loss_G: 2.7726 D(x): 0.6674 D(G(z)): 0.4213 / 0.4078\n",
"[91/100][166/391] Loss_D: 2.7423 Loss_G: 2.6178 D(x): 0.5854 D(G(z)): 0.3254 / 0.4416\n",
"[91/100][167/391] Loss_D: 2.8204 Loss_G: 2.4358 D(x): 0.7352 D(G(z)): 0.4348 / 0.4543\n",
"[91/100][168/391] Loss_D: 3.1374 Loss_G: 3.6838 D(x): 0.6555 D(G(z)): 0.5071 / 0.3337\n",
"[91/100][169/391] Loss_D: 3.0049 Loss_G: 3.1663 D(x): 0.6101 D(G(z)): 0.4257 / 0.3737\n",
"[91/100][170/391] Loss_D: 2.7372 Loss_G: 3.2378 D(x): 0.6663 D(G(z)): 0.3950 / 0.3634\n",
"[91/100][171/391] Loss_D: 2.5314 Loss_G: 2.6002 D(x): 0.6995 D(G(z)): 0.3801 / 0.4312\n",
"[91/100][172/391] Loss_D: 3.8895 Loss_G: 2.0570 D(x): 0.5402 D(G(z)): 0.5304 / 0.5322\n",
"[91/100][173/391] Loss_D: 2.6308 Loss_G: 2.3546 D(x): 0.6949 D(G(z)): 0.4152 / 0.4788\n",
"[91/100][174/391] Loss_D: 2.8130 Loss_G: 2.3619 D(x): 0.6912 D(G(z)): 0.4988 / 0.4802\n",
"[91/100][175/391] Loss_D: 2.9193 Loss_G: 2.7552 D(x): 0.6733 D(G(z)): 0.4407 / 0.4356\n",
"[91/100][176/391] Loss_D: 2.7883 Loss_G: 3.4402 D(x): 0.7150 D(G(z)): 0.4304 / 0.3586\n",
"[91/100][177/391] Loss_D: 2.8719 Loss_G: 2.5356 D(x): 0.6673 D(G(z)): 0.4178 / 0.4331\n",
"[91/100][178/391] Loss_D: 2.8461 Loss_G: 2.5143 D(x): 0.6246 D(G(z)): 0.3950 / 0.4800\n",
"[91/100][179/391] Loss_D: 3.1271 Loss_G: 2.6064 D(x): 0.5786 D(G(z)): 0.3802 / 0.4542\n",
"[91/100][180/391] Loss_D: 2.3166 Loss_G: 2.6855 D(x): 0.7351 D(G(z)): 0.3462 / 0.4386\n",
"[91/100][181/391] Loss_D: 3.7794 Loss_G: 2.8870 D(x): 0.7564 D(G(z)): 0.5316 / 0.4024\n",
"[91/100][182/391] Loss_D: 2.7599 Loss_G: 3.4793 D(x): 0.6470 D(G(z)): 0.4135 / 0.3665\n",
"[91/100][183/391] Loss_D: 2.9042 Loss_G: 2.9774 D(x): 0.5862 D(G(z)): 0.4079 / 0.4033\n",
"[91/100][184/391] Loss_D: 2.8775 Loss_G: 3.2552 D(x): 0.6286 D(G(z)): 0.3975 / 0.3635\n",
"[91/100][185/391] Loss_D: 3.2581 Loss_G: 2.6449 D(x): 0.6340 D(G(z)): 0.5100 / 0.4478\n",
"[91/100][186/391] Loss_D: 3.2327 Loss_G: 3.1438 D(x): 0.5626 D(G(z)): 0.4642 / 0.3790\n",
"[91/100][187/391] Loss_D: 2.7633 Loss_G: 2.3499 D(x): 0.6574 D(G(z)): 0.3980 / 0.4934\n",
"[91/100][188/391] Loss_D: 2.5468 Loss_G: 2.6376 D(x): 0.7245 D(G(z)): 0.4639 / 0.4376\n",
"[91/100][189/391] Loss_D: 2.8479 Loss_G: 2.4461 D(x): 0.6849 D(G(z)): 0.4459 / 0.4696\n",
"[91/100][190/391] Loss_D: 3.1416 Loss_G: 2.4670 D(x): 0.6071 D(G(z)): 0.4675 / 0.4807\n",
"[91/100][191/391] Loss_D: 3.7623 Loss_G: 2.6513 D(x): 0.6170 D(G(z)): 0.5411 / 0.4509\n",
"[91/100][192/391] Loss_D: 2.6647 Loss_G: 2.7457 D(x): 0.7268 D(G(z)): 0.4555 / 0.4241\n",
"[91/100][193/391] Loss_D: 3.0807 Loss_G: 2.6528 D(x): 0.6425 D(G(z)): 0.4999 / 0.4416\n",
"[91/100][194/391] Loss_D: 2.9064 Loss_G: 2.3029 D(x): 0.6757 D(G(z)): 0.4349 / 0.4819\n",
"[91/100][195/391] Loss_D: 3.4119 Loss_G: 3.5611 D(x): 0.5094 D(G(z)): 0.3734 / 0.3348\n",
"[91/100][196/391] Loss_D: 2.2126 Loss_G: 2.8103 D(x): 0.7879 D(G(z)): 0.3797 / 0.4292\n",
"[91/100][197/391] Loss_D: 2.7710 Loss_G: 2.4538 D(x): 0.6933 D(G(z)): 0.3930 / 0.4686\n",
"[91/100][198/391] Loss_D: 2.7286 Loss_G: 1.8595 D(x): 0.6814 D(G(z)): 0.4363 / 0.5525\n",
"[91/100][199/391] Loss_D: 2.9032 Loss_G: 2.5702 D(x): 0.6297 D(G(z)): 0.3887 / 0.4575\n",
"[91/100][200/391] Loss_D: 2.9046 Loss_G: 2.0111 D(x): 0.6356 D(G(z)): 0.4318 / 0.5529\n",
"[91/100][201/391] Loss_D: 2.4825 Loss_G: 2.6658 D(x): 0.7550 D(G(z)): 0.3652 / 0.4394\n",
"[91/100][202/391] Loss_D: 2.8417 Loss_G: 2.4418 D(x): 0.7313 D(G(z)): 0.4991 / 0.4727\n",
"[91/100][203/391] Loss_D: 3.0277 Loss_G: 2.3667 D(x): 0.6904 D(G(z)): 0.4953 / 0.4749\n",
"[91/100][204/391] Loss_D: 2.5127 Loss_G: 3.1468 D(x): 0.6167 D(G(z)): 0.3391 / 0.3694\n",
"[91/100][205/391] Loss_D: 2.7895 Loss_G: 3.3153 D(x): 0.5947 D(G(z)): 0.3675 / 0.3597\n",
"[91/100][206/391] Loss_D: 2.3903 Loss_G: 2.5451 D(x): 0.6774 D(G(z)): 0.3114 / 0.4556\n",
"[91/100][207/391] Loss_D: 2.9973 Loss_G: 2.8287 D(x): 0.6944 D(G(z)): 0.4753 / 0.4293\n",
"[91/100][208/391] Loss_D: 2.3200 Loss_G: 2.9250 D(x): 0.7164 D(G(z)): 0.3607 / 0.4008\n",
"[91/100][209/391] Loss_D: 3.2876 Loss_G: 3.4384 D(x): 0.6543 D(G(z)): 0.5288 / 0.3612\n",
"[91/100][210/391] Loss_D: 2.8506 Loss_G: 2.1859 D(x): 0.6835 D(G(z)): 0.4416 / 0.5201\n",
"[91/100][211/391] Loss_D: 3.6369 Loss_G: 2.8548 D(x): 0.7432 D(G(z)): 0.4838 / 0.4088\n",
"[91/100][212/391] Loss_D: 2.5497 Loss_G: 2.9437 D(x): 0.6848 D(G(z)): 0.3306 / 0.4083\n",
"[91/100][213/391] Loss_D: 2.6922 Loss_G: 2.6988 D(x): 0.7358 D(G(z)): 0.4289 / 0.4359\n",
"[91/100][214/391] Loss_D: 2.6327 Loss_G: 2.5677 D(x): 0.6785 D(G(z)): 0.4362 / 0.4451\n",
"[91/100][215/391] Loss_D: 2.4686 Loss_G: 3.1170 D(x): 0.6668 D(G(z)): 0.2691 / 0.3819\n",
"[91/100][216/391] Loss_D: 2.8076 Loss_G: 3.3910 D(x): 0.6517 D(G(z)): 0.4185 / 0.3516\n",
"[91/100][217/391] Loss_D: 2.7449 Loss_G: 3.0983 D(x): 0.6755 D(G(z)): 0.4199 / 0.3946\n",
"[91/100][218/391] Loss_D: 2.8024 Loss_G: 3.2653 D(x): 0.6532 D(G(z)): 0.4304 / 0.3734\n",
"[91/100][219/391] Loss_D: 2.6192 Loss_G: 2.1194 D(x): 0.6471 D(G(z)): 0.3631 / 0.5264\n",
"[91/100][220/391] Loss_D: 3.0572 Loss_G: 2.2366 D(x): 0.6622 D(G(z)): 0.4773 / 0.5063\n",
"[91/100][221/391] Loss_D: 3.1621 Loss_G: 3.3711 D(x): 0.6473 D(G(z)): 0.4997 / 0.3617\n",
"[91/100][222/391] Loss_D: 2.8150 Loss_G: 3.0238 D(x): 0.7128 D(G(z)): 0.4835 / 0.3925\n",
"[91/100][223/391] Loss_D: 3.0422 Loss_G: 2.9498 D(x): 0.7058 D(G(z)): 0.5197 / 0.4111\n",
"[91/100][224/391] Loss_D: 3.1411 Loss_G: 3.0177 D(x): 0.6290 D(G(z)): 0.4466 / 0.3895\n",
"[91/100][225/391] Loss_D: 2.7400 Loss_G: 3.5057 D(x): 0.6485 D(G(z)): 0.4064 / 0.3445\n",
"[91/100][226/391] Loss_D: 2.7883 Loss_G: 1.7966 D(x): 0.6670 D(G(z)): 0.4058 / 0.5493\n",
"[91/100][227/391] Loss_D: 2.8506 Loss_G: 3.7484 D(x): 0.6330 D(G(z)): 0.4376 / 0.3202\n",
"[91/100][228/391] Loss_D: 3.4067 Loss_G: 3.1924 D(x): 0.6317 D(G(z)): 0.5448 / 0.3767\n",
"[91/100][229/391] Loss_D: 2.7885 Loss_G: 2.7712 D(x): 0.6310 D(G(z)): 0.3426 / 0.4309\n",
"[91/100][230/391] Loss_D: 3.0296 Loss_G: 2.9080 D(x): 0.6249 D(G(z)): 0.4611 / 0.4194\n",
"[91/100][231/391] Loss_D: 3.2815 Loss_G: 2.6519 D(x): 0.6569 D(G(z)): 0.5246 / 0.4545\n",
"[91/100][232/391] Loss_D: 2.7022 Loss_G: 2.8387 D(x): 0.6404 D(G(z)): 0.3433 / 0.4177\n",
"[91/100][233/391] Loss_D: 2.9794 Loss_G: 2.3303 D(x): 0.7303 D(G(z)): 0.5067 / 0.4801\n",
"[91/100][234/391] Loss_D: 2.8354 Loss_G: 3.1988 D(x): 0.6773 D(G(z)): 0.4484 / 0.3949\n",
"[91/100][235/391] Loss_D: 2.8597 Loss_G: 2.9412 D(x): 0.5997 D(G(z)): 0.4030 / 0.3922\n",
"[91/100][236/391] Loss_D: 2.8533 Loss_G: 3.0533 D(x): 0.6654 D(G(z)): 0.4241 / 0.3732\n",
"[91/100][237/391] Loss_D: 3.3624 Loss_G: 2.7270 D(x): 0.5986 D(G(z)): 0.4991 / 0.4150\n",
"[91/100][238/391] Loss_D: 2.5667 Loss_G: 3.8944 D(x): 0.6896 D(G(z)): 0.4408 / 0.3057\n",
"[91/100][239/391] Loss_D: 3.5004 Loss_G: 2.4144 D(x): 0.5796 D(G(z)): 0.4953 / 0.4623\n",
"[91/100][240/391] Loss_D: 3.1890 Loss_G: 3.2735 D(x): 0.6548 D(G(z)): 0.4814 / 0.3719\n",
"[91/100][241/391] Loss_D: 3.9437 Loss_G: 3.1010 D(x): 0.5618 D(G(z)): 0.3353 / 0.3915\n",
"[91/100][242/391] Loss_D: 2.5235 Loss_G: 3.2793 D(x): 0.7100 D(G(z)): 0.4018 / 0.3799\n",
"[91/100][243/391] Loss_D: 2.4607 Loss_G: 3.3959 D(x): 0.7617 D(G(z)): 0.4354 / 0.3518\n",
"[91/100][244/391] Loss_D: 2.5257 Loss_G: 2.2151 D(x): 0.6445 D(G(z)): 0.3441 / 0.5004\n",
"[91/100][245/391] Loss_D: 2.8418 Loss_G: 2.2317 D(x): 0.6394 D(G(z)): 0.4054 / 0.4941\n",
"[91/100][246/391] Loss_D: 2.7419 Loss_G: 2.2075 D(x): 0.6939 D(G(z)): 0.3791 / 0.5214\n",
"[91/100][247/391] Loss_D: 3.0395 Loss_G: 2.3852 D(x): 0.6589 D(G(z)): 0.4664 / 0.4799\n",
"[91/100][248/391] Loss_D: 3.1298 Loss_G: 3.3730 D(x): 0.5971 D(G(z)): 0.4442 / 0.3547\n",
"[91/100][249/391] Loss_D: 2.6604 Loss_G: 1.9771 D(x): 0.6213 D(G(z)): 0.3840 / 0.5590\n",
"[91/100][250/391] Loss_D: 2.8812 Loss_G: 2.4960 D(x): 0.7609 D(G(z)): 0.4904 / 0.4602\n",
"[91/100][251/391] Loss_D: 3.0921 Loss_G: 2.1537 D(x): 0.6417 D(G(z)): 0.4839 / 0.5087\n",
"[91/100][252/391] Loss_D: 2.8011 Loss_G: 2.4941 D(x): 0.6448 D(G(z)): 0.3917 / 0.4689\n",
"[91/100][253/391] Loss_D: 3.1299 Loss_G: 2.2137 D(x): 0.5953 D(G(z)): 0.3756 / 0.4933\n",
"[91/100][254/391] Loss_D: 2.6236 Loss_G: 1.9387 D(x): 0.6663 D(G(z)): 0.4093 / 0.5400\n",
"[91/100][255/391] Loss_D: 3.1645 Loss_G: 2.1568 D(x): 0.6884 D(G(z)): 0.5043 / 0.5253\n",
"[91/100][256/391] Loss_D: 2.8151 Loss_G: 2.5169 D(x): 0.6016 D(G(z)): 0.3723 / 0.4527\n",
"[91/100][257/391] Loss_D: 3.6886 Loss_G: 2.7497 D(x): 0.6178 D(G(z)): 0.5248 / 0.4175\n",
"[91/100][258/391] Loss_D: 2.6628 Loss_G: 2.8641 D(x): 0.7074 D(G(z)): 0.4162 / 0.4266\n",
"[91/100][259/391] Loss_D: 2.6905 Loss_G: 2.7553 D(x): 0.7266 D(G(z)): 0.4540 / 0.4220\n",
"[91/100][260/391] Loss_D: 2.6343 Loss_G: 2.5699 D(x): 0.7912 D(G(z)): 0.4451 / 0.4515\n",
"[91/100][261/391] Loss_D: 2.6469 Loss_G: 3.1155 D(x): 0.7420 D(G(z)): 0.4082 / 0.4084\n",
"[91/100][262/391] Loss_D: 2.7418 Loss_G: 3.4959 D(x): 0.7009 D(G(z)): 0.3949 / 0.3394\n",
"[91/100][263/391] Loss_D: 3.5297 Loss_G: 2.7799 D(x): 0.5576 D(G(z)): 0.4458 / 0.4371\n",
"[91/100][264/391] Loss_D: 2.2395 Loss_G: 2.6271 D(x): 0.7282 D(G(z)): 0.3834 / 0.4352\n",
"[91/100][265/391] Loss_D: 2.3694 Loss_G: 2.9224 D(x): 0.7757 D(G(z)): 0.3719 / 0.4009\n",
"[91/100][266/391] Loss_D: 3.2218 Loss_G: 2.6584 D(x): 0.7391 D(G(z)): 0.5138 / 0.4540\n",
"[91/100][267/391] Loss_D: 2.6232 Loss_G: 3.1810 D(x): 0.6338 D(G(z)): 0.3180 / 0.3755\n",
"[91/100][268/391] Loss_D: 3.1817 Loss_G: 2.6084 D(x): 0.5756 D(G(z)): 0.4521 / 0.4570\n",
"[91/100][269/391] Loss_D: 2.9715 Loss_G: 2.8551 D(x): 0.6477 D(G(z)): 0.4552 / 0.4126\n",
"[91/100][270/391] Loss_D: 2.8331 Loss_G: 2.7334 D(x): 0.6246 D(G(z)): 0.3439 / 0.4231\n",
"[91/100][271/391] Loss_D: 3.7154 Loss_G: 3.0171 D(x): 0.7207 D(G(z)): 0.3535 / 0.4034\n",
"[91/100][272/391] Loss_D: 3.2296 Loss_G: 2.3641 D(x): 0.7104 D(G(z)): 0.5440 / 0.4877\n",
"[91/100][273/391] Loss_D: 3.0854 Loss_G: 2.3771 D(x): 0.6892 D(G(z)): 0.5204 / 0.4844\n",
"[91/100][274/391] Loss_D: 2.8336 Loss_G: 2.9015 D(x): 0.6330 D(G(z)): 0.4015 / 0.4115\n",
"[91/100][275/391] Loss_D: 2.6229 Loss_G: 2.7966 D(x): 0.6819 D(G(z)): 0.3738 / 0.4164\n",
"[91/100][276/391] Loss_D: 3.3647 Loss_G: 3.8032 D(x): 0.6706 D(G(z)): 0.5541 / 0.3150\n",
"[91/100][277/391] Loss_D: 2.8729 Loss_G: 2.8889 D(x): 0.6982 D(G(z)): 0.4572 / 0.3999\n",
"[91/100][278/391] Loss_D: 2.5937 Loss_G: 3.5147 D(x): 0.6804 D(G(z)): 0.4288 / 0.3428\n",
"[91/100][279/391] Loss_D: 2.6972 Loss_G: 3.0353 D(x): 0.6457 D(G(z)): 0.3474 / 0.3997\n",
"[91/100][280/391] Loss_D: 2.6473 Loss_G: 3.0422 D(x): 0.6646 D(G(z)): 0.3839 / 0.3925\n",
"[91/100][281/391] Loss_D: 3.6302 Loss_G: 3.2383 D(x): 0.6403 D(G(z)): 0.5669 / 0.3704\n",
"[91/100][282/391] Loss_D: 2.7014 Loss_G: 2.6148 D(x): 0.7516 D(G(z)): 0.4513 / 0.4503\n",
"[91/100][283/391] Loss_D: 2.7881 Loss_G: 2.7750 D(x): 0.6722 D(G(z)): 0.4062 / 0.4238\n",
"[91/100][284/391] Loss_D: 2.8793 Loss_G: 3.3523 D(x): 0.6637 D(G(z)): 0.4449 / 0.3521\n",
"[91/100][285/391] Loss_D: 3.1016 Loss_G: 3.3780 D(x): 0.5962 D(G(z)): 0.4255 / 0.3470\n",
"[91/100][286/391] Loss_D: 3.4515 Loss_G: 3.0353 D(x): 0.5408 D(G(z)): 0.4467 / 0.3853\n",
"[91/100][287/391] Loss_D: 2.6869 Loss_G: 2.9944 D(x): 0.7185 D(G(z)): 0.3321 / 0.4004\n",
"[91/100][288/391] Loss_D: 2.3603 Loss_G: 1.9065 D(x): 0.6547 D(G(z)): 0.3093 / 0.5645\n",
"[91/100][289/391] Loss_D: 3.3429 Loss_G: 2.1576 D(x): 0.6630 D(G(z)): 0.5031 / 0.4939\n",
"[91/100][290/391] Loss_D: 2.8695 Loss_G: 2.4137 D(x): 0.6523 D(G(z)): 0.4520 / 0.4860\n",
"[91/100][291/391] Loss_D: 3.5662 Loss_G: 2.9180 D(x): 0.6061 D(G(z)): 0.5358 / 0.4050\n",
"[91/100][292/391] Loss_D: 3.6331 Loss_G: 3.3332 D(x): 0.6319 D(G(z)): 0.5466 / 0.3698\n",
"[91/100][293/391] Loss_D: 3.3823 Loss_G: 3.3227 D(x): 0.6316 D(G(z)): 0.5316 / 0.3699\n",
"[91/100][294/391] Loss_D: 3.3563 Loss_G: 3.5619 D(x): 0.5005 D(G(z)): 0.3653 / 0.3451\n",
"[91/100][295/391] Loss_D: 2.8823 Loss_G: 2.6142 D(x): 0.6428 D(G(z)): 0.3994 / 0.4429\n",
"[91/100][296/391] Loss_D: 2.7471 Loss_G: 2.6771 D(x): 0.6615 D(G(z)): 0.3797 / 0.4148\n",
"[91/100][297/391] Loss_D: 2.9429 Loss_G: 2.9761 D(x): 0.6620 D(G(z)): 0.4320 / 0.4083\n",
"[91/100][298/391] Loss_D: 3.1698 Loss_G: 2.4378 D(x): 0.6514 D(G(z)): 0.5140 / 0.4562\n",
"[91/100][299/391] Loss_D: 2.5778 Loss_G: 3.4584 D(x): 0.7724 D(G(z)): 0.4493 / 0.3459\n",
"[91/100][300/391] Loss_D: 2.8219 Loss_G: 3.4439 D(x): 0.6197 D(G(z)): 0.3494 / 0.3698\n",
"[91/100][301/391] Loss_D: 3.5692 Loss_G: 3.2687 D(x): 0.6662 D(G(z)): 0.4628 / 0.3701\n",
"[91/100][302/391] Loss_D: 2.8124 Loss_G: 2.7455 D(x): 0.5960 D(G(z)): 0.3463 / 0.4221\n",
"[91/100][303/391] Loss_D: 2.6469 Loss_G: 2.3438 D(x): 0.6972 D(G(z)): 0.4350 / 0.4890\n",
"[91/100][304/391] Loss_D: 2.4349 Loss_G: 2.1787 D(x): 0.7313 D(G(z)): 0.4678 / 0.5190\n",
"[91/100][305/391] Loss_D: 2.5522 Loss_G: 2.3474 D(x): 0.6745 D(G(z)): 0.3560 / 0.4816\n",
"[91/100][306/391] Loss_D: 2.7419 Loss_G: 3.0862 D(x): 0.6251 D(G(z)): 0.3165 / 0.3786\n",
"[91/100][307/391] Loss_D: 2.7596 Loss_G: 3.0131 D(x): 0.6410 D(G(z)): 0.4059 / 0.3956\n",
"[91/100][308/391] Loss_D: 2.9043 Loss_G: 3.3339 D(x): 0.6396 D(G(z)): 0.4342 / 0.3656\n",
"[91/100][309/391] Loss_D: 2.8260 Loss_G: 1.9493 D(x): 0.7263 D(G(z)): 0.5085 / 0.5519\n",
"[91/100][310/391] Loss_D: 2.7848 Loss_G: 3.1114 D(x): 0.7252 D(G(z)): 0.4387 / 0.3884\n",
"[91/100][311/391] Loss_D: 2.8941 Loss_G: 2.5524 D(x): 0.6999 D(G(z)): 0.4414 / 0.4561\n",
"[91/100][312/391] Loss_D: 2.5958 Loss_G: 3.4337 D(x): 0.6551 D(G(z)): 0.3844 / 0.3539\n",
"[91/100][313/391] Loss_D: 3.1693 Loss_G: 3.5993 D(x): 0.6295 D(G(z)): 0.4862 / 0.3426\n",
"[91/100][314/391] Loss_D: 2.5550 Loss_G: 3.8792 D(x): 0.6749 D(G(z)): 0.4201 / 0.3069\n",
"[91/100][315/391] Loss_D: 3.0947 Loss_G: 2.7249 D(x): 0.6331 D(G(z)): 0.4351 / 0.4243\n",
"[91/100][316/391] Loss_D: 3.2053 Loss_G: 2.8865 D(x): 0.5535 D(G(z)): 0.3790 / 0.4138\n",
"[91/100][317/391] Loss_D: 2.6567 Loss_G: 2.4108 D(x): 0.6918 D(G(z)): 0.3185 / 0.4663\n",
"[91/100][318/391] Loss_D: 2.7818 Loss_G: 2.6626 D(x): 0.6457 D(G(z)): 0.4217 / 0.4404\n",
"[91/100][319/391] Loss_D: 2.5073 Loss_G: 2.1193 D(x): 0.7122 D(G(z)): 0.3646 / 0.5170\n",
"[91/100][320/391] Loss_D: 2.8654 Loss_G: 2.0501 D(x): 0.7213 D(G(z)): 0.4796 / 0.5196\n",
"[91/100][321/391] Loss_D: 2.9854 Loss_G: 1.7441 D(x): 0.6540 D(G(z)): 0.4187 / 0.5749\n",
"[91/100][322/391] Loss_D: 3.2673 Loss_G: 2.0244 D(x): 0.6159 D(G(z)): 0.4939 / 0.5388\n",
"[91/100][323/391] Loss_D: 3.2281 Loss_G: 2.5307 D(x): 0.6899 D(G(z)): 0.5082 / 0.4514\n",
"[91/100][324/391] Loss_D: 2.8631 Loss_G: 2.8923 D(x): 0.7082 D(G(z)): 0.5196 / 0.4109\n",
"[91/100][325/391] Loss_D: 4.2961 Loss_G: 3.9806 D(x): 0.5388 D(G(z)): 0.5910 / 0.3054\n",
"[91/100][326/391] Loss_D: 2.9768 Loss_G: 2.4609 D(x): 0.5908 D(G(z)): 0.3770 / 0.4584\n",
"[91/100][327/391] Loss_D: 2.4846 Loss_G: 2.3460 D(x): 0.6752 D(G(z)): 0.3221 / 0.4664\n",
"[91/100][328/391] Loss_D: 3.0863 Loss_G: 2.3263 D(x): 0.6279 D(G(z)): 0.5021 / 0.4791\n",
"[91/100][329/391] Loss_D: 2.7066 Loss_G: 2.3942 D(x): 0.7532 D(G(z)): 0.4683 / 0.4799\n",
"[91/100][330/391] Loss_D: 2.6418 Loss_G: 2.4168 D(x): 0.6756 D(G(z)): 0.3621 / 0.4894\n",
"[91/100][331/391] Loss_D: 3.4854 Loss_G: 3.2628 D(x): 0.7058 D(G(z)): 0.3744 / 0.3632\n",
"[91/100][332/391] Loss_D: 2.6085 Loss_G: 2.3263 D(x): 0.6805 D(G(z)): 0.4279 / 0.4970\n",
"[91/100][333/391] Loss_D: 3.0197 Loss_G: 2.3682 D(x): 0.6281 D(G(z)): 0.3607 / 0.4866\n",
"[91/100][334/391] Loss_D: 2.4971 Loss_G: 2.8811 D(x): 0.7204 D(G(z)): 0.4301 / 0.4252\n",
"[91/100][335/391] Loss_D: 2.5894 Loss_G: 3.2826 D(x): 0.7081 D(G(z)): 0.3864 / 0.3623\n",
"[91/100][336/391] Loss_D: 3.4012 Loss_G: 2.8176 D(x): 0.6762 D(G(z)): 0.5258 / 0.4232\n",
"[91/100][337/391] Loss_D: 3.3947 Loss_G: 3.1038 D(x): 0.6186 D(G(z)): 0.5121 / 0.3830\n",
"[91/100][338/391] Loss_D: 2.8396 Loss_G: 3.3543 D(x): 0.6834 D(G(z)): 0.4827 / 0.3599\n",
"[91/100][339/391] Loss_D: 3.3066 Loss_G: 2.3539 D(x): 0.5675 D(G(z)): 0.4082 / 0.4819\n",
"[91/100][340/391] Loss_D: 2.3933 Loss_G: 2.6680 D(x): 0.7254 D(G(z)): 0.3477 / 0.4519\n",
"[91/100][341/391] Loss_D: 3.1345 Loss_G: 2.8899 D(x): 0.6967 D(G(z)): 0.5066 / 0.4106\n",
"[91/100][342/391] Loss_D: 2.8672 Loss_G: 2.7185 D(x): 0.6428 D(G(z)): 0.4268 / 0.4547\n",
"[91/100][343/391] Loss_D: 3.0459 Loss_G: 2.2702 D(x): 0.6752 D(G(z)): 0.4554 / 0.5042\n",
"[91/100][344/391] Loss_D: 2.9732 Loss_G: 2.4644 D(x): 0.5747 D(G(z)): 0.3810 / 0.4612\n",
"[91/100][345/391] Loss_D: 2.7427 Loss_G: 2.2346 D(x): 0.6849 D(G(z)): 0.4308 / 0.4939\n",
"[91/100][346/391] Loss_D: 2.9519 Loss_G: 3.1253 D(x): 0.6136 D(G(z)): 0.4272 / 0.3713\n",
"[91/100][347/391] Loss_D: 2.6716 Loss_G: 2.8793 D(x): 0.7602 D(G(z)): 0.4323 / 0.4010\n",
"[91/100][348/391] Loss_D: 3.0192 Loss_G: 2.2254 D(x): 0.6001 D(G(z)): 0.3500 / 0.4936\n",
"[91/100][349/391] Loss_D: 2.5629 Loss_G: 4.1360 D(x): 0.7626 D(G(z)): 0.4325 / 0.2818\n",
"[91/100][350/391] Loss_D: 2.5307 Loss_G: 2.6357 D(x): 0.6833 D(G(z)): 0.3521 / 0.4515\n",
"[91/100][351/391] Loss_D: 3.2540 Loss_G: 2.5055 D(x): 0.6786 D(G(z)): 0.5294 / 0.4809\n",
"[91/100][352/391] Loss_D: 2.7990 Loss_G: 2.3911 D(x): 0.6830 D(G(z)): 0.4668 / 0.4868\n",
"[91/100][353/391] Loss_D: 2.7638 Loss_G: 3.4865 D(x): 0.6553 D(G(z)): 0.3732 / 0.3412\n",
"[91/100][354/391] Loss_D: 2.6461 Loss_G: 2.9846 D(x): 0.6536 D(G(z)): 0.4013 / 0.4119\n",
"[91/100][355/391] Loss_D: 3.0275 Loss_G: 2.6024 D(x): 0.7090 D(G(z)): 0.5292 / 0.4486\n",
"[91/100][356/391] Loss_D: 2.8549 Loss_G: 3.0079 D(x): 0.6015 D(G(z)): 0.3802 / 0.4030\n",
"[91/100][357/391] Loss_D: 2.8461 Loss_G: 2.8076 D(x): 0.6001 D(G(z)): 0.3670 / 0.4053\n",
"[91/100][358/391] Loss_D: 3.1657 Loss_G: 2.0954 D(x): 0.5900 D(G(z)): 0.4495 / 0.5109\n",
"[91/100][359/391] Loss_D: 2.9319 Loss_G: 3.1398 D(x): 0.7194 D(G(z)): 0.5273 / 0.3793\n",
"[91/100][360/391] Loss_D: 2.8968 Loss_G: 2.6839 D(x): 0.6001 D(G(z)): 0.3698 / 0.4483\n",
"[91/100][361/391] Loss_D: 3.4995 Loss_G: 2.5198 D(x): 0.5684 D(G(z)): 0.3713 / 0.4703\n",
"[91/100][362/391] Loss_D: 2.5957 Loss_G: 2.4292 D(x): 0.8225 D(G(z)): 0.4601 / 0.4818\n",
"[91/100][363/391] Loss_D: 3.1565 Loss_G: 2.8592 D(x): 0.7443 D(G(z)): 0.5298 / 0.4255\n",
"[91/100][364/391] Loss_D: 2.8976 Loss_G: 2.5192 D(x): 0.6723 D(G(z)): 0.4006 / 0.4477\n",
"[91/100][365/391] Loss_D: 3.0035 Loss_G: 2.6664 D(x): 0.5548 D(G(z)): 0.3581 / 0.4393\n",
"[91/100][366/391] Loss_D: 2.6640 Loss_G: 2.3841 D(x): 0.6519 D(G(z)): 0.3941 / 0.4640\n",
"[91/100][367/391] Loss_D: 3.3177 Loss_G: 2.4767 D(x): 0.6740 D(G(z)): 0.5595 / 0.4674\n",
"[91/100][368/391] Loss_D: 2.9897 Loss_G: 3.2292 D(x): 0.6418 D(G(z)): 0.4526 / 0.3651\n",
"[91/100][369/391] Loss_D: 2.5980 Loss_G: 3.1870 D(x): 0.7221 D(G(z)): 0.4633 / 0.3765\n",
"[91/100][370/391] Loss_D: 3.0503 Loss_G: 2.3305 D(x): 0.5457 D(G(z)): 0.3456 / 0.4909\n",
"[91/100][371/391] Loss_D: 2.5589 Loss_G: 3.0005 D(x): 0.7084 D(G(z)): 0.3905 / 0.4115\n",
"[91/100][372/391] Loss_D: 2.5374 Loss_G: 3.4157 D(x): 0.7179 D(G(z)): 0.4400 / 0.3440\n",
"[91/100][373/391] Loss_D: 2.3601 Loss_G: 3.1188 D(x): 0.7287 D(G(z)): 0.3855 / 0.3831\n",
"[91/100][374/391] Loss_D: 2.6993 Loss_G: 3.0264 D(x): 0.6697 D(G(z)): 0.4310 / 0.4007\n",
"[91/100][375/391] Loss_D: 3.3899 Loss_G: 2.6042 D(x): 0.5998 D(G(z)): 0.4489 / 0.4266\n",
"[91/100][376/391] Loss_D: 3.8809 Loss_G: 2.9000 D(x): 0.5195 D(G(z)): 0.4955 / 0.4070\n",
"[91/100][377/391] Loss_D: 2.8993 Loss_G: 2.5158 D(x): 0.6707 D(G(z)): 0.4396 / 0.4633\n",
"[91/100][378/391] Loss_D: 2.5371 Loss_G: 2.6011 D(x): 0.6697 D(G(z)): 0.3952 / 0.4526\n",
"[91/100][379/391] Loss_D: 2.8613 Loss_G: 2.0436 D(x): 0.6426 D(G(z)): 0.4365 / 0.5469\n",
"[91/100][380/391] Loss_D: 2.9739 Loss_G: 2.8179 D(x): 0.7295 D(G(z)): 0.4936 / 0.4231\n",
"[91/100][381/391] Loss_D: 3.5120 Loss_G: 2.7160 D(x): 0.6763 D(G(z)): 0.5852 / 0.4287\n",
"[91/100][382/391] Loss_D: 2.9179 Loss_G: 3.3973 D(x): 0.5419 D(G(z)): 0.2882 / 0.3429\n",
"[91/100][383/391] Loss_D: 2.5198 Loss_G: 2.5278 D(x): 0.7128 D(G(z)): 0.3710 / 0.4708\n",
"[91/100][384/391] Loss_D: 2.6308 Loss_G: 2.6744 D(x): 0.6734 D(G(z)): 0.4462 / 0.4420\n",
"[91/100][385/391] Loss_D: 3.0221 Loss_G: 3.1642 D(x): 0.6511 D(G(z)): 0.4726 / 0.3714\n",
"[91/100][386/391] Loss_D: 3.2456 Loss_G: 2.5540 D(x): 0.6422 D(G(z)): 0.4619 / 0.4480\n",
"[91/100][387/391] Loss_D: 2.6662 Loss_G: 2.7044 D(x): 0.7187 D(G(z)): 0.3921 / 0.4329\n",
"[91/100][388/391] Loss_D: 3.1288 Loss_G: 3.0734 D(x): 0.6155 D(G(z)): 0.4670 / 0.3870\n",
"[91/100][389/391] Loss_D: 3.0605 Loss_G: 2.8750 D(x): 0.6113 D(G(z)): 0.4052 / 0.4162\n",
"[91/100][390/391] Loss_D: 2.6725 Loss_G: 2.6985 D(x): 0.6084 D(G(z)): 0.3210 / 0.4299\n",
"[91/100][391/391] Loss_D: 3.6451 Loss_G: 2.5174 D(x): 0.6795 D(G(z)): 0.4214 / 0.4593\n",
"[92/100][1/391] Loss_D: 3.6267 Loss_G: 2.3532 D(x): 0.6009 D(G(z)): 0.4923 / 0.4925\n",
"[92/100][2/391] Loss_D: 2.7450 Loss_G: 2.2572 D(x): 0.6174 D(G(z)): 0.3794 / 0.4904\n",
"[92/100][3/391] Loss_D: 2.8849 Loss_G: 1.2810 D(x): 0.6813 D(G(z)): 0.4609 / 0.6690\n",
"[92/100][4/391] Loss_D: 2.0356 Loss_G: 3.0320 D(x): 0.7679 D(G(z)): 0.3548 / 0.3983\n",
"[92/100][5/391] Loss_D: 2.8226 Loss_G: 3.0060 D(x): 0.6241 D(G(z)): 0.3486 / 0.3970\n",
"[92/100][6/391] Loss_D: 3.1983 Loss_G: 2.7316 D(x): 0.7549 D(G(z)): 0.5105 / 0.3982\n",
"[92/100][7/391] Loss_D: 2.6721 Loss_G: 2.3334 D(x): 0.7064 D(G(z)): 0.3562 / 0.4825\n",
"[92/100][8/391] Loss_D: 2.2800 Loss_G: 1.9171 D(x): 0.7087 D(G(z)): 0.3581 / 0.5255\n",
"[92/100][9/391] Loss_D: 3.3081 Loss_G: 2.4865 D(x): 0.5967 D(G(z)): 0.4992 / 0.4696\n",
"[92/100][10/391] Loss_D: 2.4627 Loss_G: 2.9325 D(x): 0.7566 D(G(z)): 0.4469 / 0.4296\n",
"[92/100][11/391] Loss_D: 2.8751 Loss_G: 2.4292 D(x): 0.6750 D(G(z)): 0.4157 / 0.4739\n",
"[92/100][12/391] Loss_D: 2.9415 Loss_G: 2.7019 D(x): 0.6822 D(G(z)): 0.4660 / 0.4317\n",
"[92/100][13/391] Loss_D: 2.6343 Loss_G: 2.8382 D(x): 0.6433 D(G(z)): 0.3277 / 0.4279\n",
"[92/100][14/391] Loss_D: 2.2812 Loss_G: 2.6108 D(x): 0.7373 D(G(z)): 0.3628 / 0.4456\n",
"[92/100][15/391] Loss_D: 2.4918 Loss_G: 2.0632 D(x): 0.6868 D(G(z)): 0.3813 / 0.5127\n",
"[92/100][16/391] Loss_D: 3.4505 Loss_G: 2.4814 D(x): 0.6704 D(G(z)): 0.5341 / 0.4748\n",
"[92/100][17/391] Loss_D: 3.8424 Loss_G: 4.0921 D(x): 0.5543 D(G(z)): 0.5711 / 0.2803\n",
"[92/100][18/391] Loss_D: 2.8776 Loss_G: 1.8445 D(x): 0.6068 D(G(z)): 0.4003 / 0.5626\n",
"[92/100][19/391] Loss_D: 3.1952 Loss_G: 2.5469 D(x): 0.6481 D(G(z)): 0.4974 / 0.4663\n",
"[92/100][20/391] Loss_D: 2.9107 Loss_G: 2.6764 D(x): 0.7107 D(G(z)): 0.5026 / 0.4432\n",
"[92/100][21/391] Loss_D: 2.5482 Loss_G: 2.4308 D(x): 0.7231 D(G(z)): 0.4571 / 0.4867\n",
"[92/100][22/391] Loss_D: 2.8290 Loss_G: 3.4017 D(x): 0.6951 D(G(z)): 0.4410 / 0.3544\n",
"[92/100][23/391] Loss_D: 2.9954 Loss_G: 3.7303 D(x): 0.6104 D(G(z)): 0.4261 / 0.3140\n",
"[92/100][24/391] Loss_D: 3.0037 Loss_G: 3.4753 D(x): 0.6050 D(G(z)): 0.3566 / 0.3348\n",
"[92/100][25/391] Loss_D: 2.7835 Loss_G: 3.0943 D(x): 0.7067 D(G(z)): 0.4243 / 0.3981\n",
"[92/100][26/391] Loss_D: 3.3359 Loss_G: 2.7448 D(x): 0.5486 D(G(z)): 0.4377 / 0.4313\n",
"[92/100][27/391] Loss_D: 3.1947 Loss_G: 3.4609 D(x): 0.6434 D(G(z)): 0.4503 / 0.3368\n",
"[92/100][28/391] Loss_D: 2.9868 Loss_G: 3.0473 D(x): 0.6080 D(G(z)): 0.4316 / 0.3892\n",
"[92/100][29/391] Loss_D: 2.6778 Loss_G: 2.8396 D(x): 0.7176 D(G(z)): 0.4557 / 0.4145\n",
"[92/100][30/391] Loss_D: 2.6794 Loss_G: 2.8041 D(x): 0.7023 D(G(z)): 0.4637 / 0.4386\n",
"[92/100][31/391] Loss_D: 3.7106 Loss_G: 3.0550 D(x): 0.7342 D(G(z)): 0.4226 / 0.3945\n",
"[92/100][32/391] Loss_D: 3.1531 Loss_G: 2.5268 D(x): 0.6774 D(G(z)): 0.4775 / 0.4456\n",
"[92/100][33/391] Loss_D: 3.3862 Loss_G: 3.6092 D(x): 0.6952 D(G(z)): 0.5601 / 0.3310\n",
"[92/100][34/391] Loss_D: 2.3386 Loss_G: 2.6739 D(x): 0.6720 D(G(z)): 0.2927 / 0.4460\n",
"[92/100][35/391] Loss_D: 2.6249 Loss_G: 3.0364 D(x): 0.7146 D(G(z)): 0.4338 / 0.3992\n",
"[92/100][36/391] Loss_D: 3.0952 Loss_G: 3.7149 D(x): 0.6165 D(G(z)): 0.4095 / 0.3221\n",
"[92/100][37/391] Loss_D: 2.8650 Loss_G: 3.7530 D(x): 0.6248 D(G(z)): 0.4103 / 0.3205\n",
"[92/100][38/391] Loss_D: 3.0999 Loss_G: 2.8089 D(x): 0.6344 D(G(z)): 0.4853 / 0.4112\n",
"[92/100][39/391] Loss_D: 2.7328 Loss_G: 1.9804 D(x): 0.7332 D(G(z)): 0.4678 / 0.5277\n",
"[92/100][40/391] Loss_D: 3.1198 Loss_G: 3.1922 D(x): 0.6219 D(G(z)): 0.4199 / 0.3731\n",
"[92/100][41/391] Loss_D: 3.5995 Loss_G: 2.7200 D(x): 0.4711 D(G(z)): 0.3337 / 0.4330\n",
"[92/100][42/391] Loss_D: 3.0801 Loss_G: 2.3443 D(x): 0.6358 D(G(z)): 0.4715 / 0.4801\n",
"[92/100][43/391] Loss_D: 3.2417 Loss_G: 2.9576 D(x): 0.6730 D(G(z)): 0.5355 / 0.4009\n",
"[92/100][44/391] Loss_D: 2.4721 Loss_G: 3.0971 D(x): 0.7078 D(G(z)): 0.4087 / 0.3861\n",
"[92/100][45/391] Loss_D: 3.2449 Loss_G: 3.4423 D(x): 0.6215 D(G(z)): 0.4742 / 0.3420\n",
"[92/100][46/391] Loss_D: 3.2298 Loss_G: 2.3095 D(x): 0.6855 D(G(z)): 0.5311 / 0.4838\n",
"[92/100][47/391] Loss_D: 3.4975 Loss_G: 3.1619 D(x): 0.5700 D(G(z)): 0.5094 / 0.3592\n",
"[92/100][48/391] Loss_D: 2.9605 Loss_G: 2.6062 D(x): 0.5789 D(G(z)): 0.3325 / 0.4554\n",
"[92/100][49/391] Loss_D: 3.0180 Loss_G: 3.6587 D(x): 0.6435 D(G(z)): 0.4607 / 0.3342\n",
"[92/100][50/391] Loss_D: 3.1099 Loss_G: 1.9417 D(x): 0.6044 D(G(z)): 0.4604 / 0.5647\n",
"[92/100][51/391] Loss_D: 2.4167 Loss_G: 2.0049 D(x): 0.7465 D(G(z)): 0.3965 / 0.5354\n",
"[92/100][52/391] Loss_D: 2.6198 Loss_G: 2.9386 D(x): 0.6580 D(G(z)): 0.3537 / 0.3880\n",
"[92/100][53/391] Loss_D: 2.6237 Loss_G: 2.4054 D(x): 0.7471 D(G(z)): 0.4379 / 0.4897\n",
"[92/100][54/391] Loss_D: 2.6097 Loss_G: 2.7251 D(x): 0.7335 D(G(z)): 0.4464 / 0.4114\n",
"[92/100][55/391] Loss_D: 2.5465 Loss_G: 2.6236 D(x): 0.6877 D(G(z)): 0.3758 / 0.4464\n",
"[92/100][56/391] Loss_D: 2.6993 Loss_G: 2.8584 D(x): 0.6776 D(G(z)): 0.3747 / 0.4104\n",
"[92/100][57/391] Loss_D: 2.8284 Loss_G: 3.2062 D(x): 0.6481 D(G(z)): 0.4135 / 0.3718\n",
"[92/100][58/391] Loss_D: 2.4508 Loss_G: 3.1487 D(x): 0.6986 D(G(z)): 0.3750 / 0.3865\n",
"[92/100][59/391] Loss_D: 2.7352 Loss_G: 2.8204 D(x): 0.6361 D(G(z)): 0.3649 / 0.4245\n",
"[92/100][60/391] Loss_D: 2.8832 Loss_G: 2.6126 D(x): 0.6725 D(G(z)): 0.4625 / 0.4419\n",
"[92/100][61/391] Loss_D: 3.6408 Loss_G: 2.6289 D(x): 0.5843 D(G(z)): 0.3933 / 0.4550\n",
"[92/100][62/391] Loss_D: 3.5033 Loss_G: 1.9257 D(x): 0.5576 D(G(z)): 0.4739 / 0.5372\n",
"[92/100][63/391] Loss_D: 3.1750 Loss_G: 2.1006 D(x): 0.6381 D(G(z)): 0.4713 / 0.5111\n",
"[92/100][64/391] Loss_D: 2.8493 Loss_G: 2.3446 D(x): 0.6879 D(G(z)): 0.5063 / 0.4958\n",
"[92/100][65/391] Loss_D: 2.7528 Loss_G: 3.1491 D(x): 0.6762 D(G(z)): 0.4012 / 0.3853\n",
"[92/100][66/391] Loss_D: 2.3315 Loss_G: 2.8623 D(x): 0.7363 D(G(z)): 0.3560 / 0.4110\n",
"[92/100][67/391] Loss_D: 3.1078 Loss_G: 2.7589 D(x): 0.7232 D(G(z)): 0.4860 / 0.4095\n",
"[92/100][68/391] Loss_D: 2.7505 Loss_G: 2.1711 D(x): 0.6608 D(G(z)): 0.4521 / 0.5151\n",
"[92/100][69/391] Loss_D: 3.3032 Loss_G: 1.6914 D(x): 0.5319 D(G(z)): 0.4045 / 0.5869\n",
"[92/100][70/391] Loss_D: 2.6696 Loss_G: 2.4013 D(x): 0.7138 D(G(z)): 0.3907 / 0.4829\n",
"[92/100][71/391] Loss_D: 2.9674 Loss_G: 2.7951 D(x): 0.7634 D(G(z)): 0.4925 / 0.4235\n",
"[92/100][72/391] Loss_D: 3.3544 Loss_G: 2.6867 D(x): 0.6306 D(G(z)): 0.5440 / 0.4267\n",
"[92/100][73/391] Loss_D: 2.7751 Loss_G: 3.0262 D(x): 0.7204 D(G(z)): 0.4634 / 0.3942\n",
"[92/100][74/391] Loss_D: 2.9939 Loss_G: 2.6403 D(x): 0.5573 D(G(z)): 0.3387 / 0.4362\n",
"[92/100][75/391] Loss_D: 3.0306 Loss_G: 3.0683 D(x): 0.5823 D(G(z)): 0.3915 / 0.3890\n",
"[92/100][76/391] Loss_D: 3.3229 Loss_G: 2.6493 D(x): 0.5972 D(G(z)): 0.4938 / 0.4356\n",
"[92/100][77/391] Loss_D: 2.6863 Loss_G: 2.5462 D(x): 0.7192 D(G(z)): 0.4004 / 0.4458\n",
"[92/100][78/391] Loss_D: 2.8835 Loss_G: 2.4166 D(x): 0.6478 D(G(z)): 0.4467 / 0.4649\n",
"[92/100][79/391] Loss_D: 2.9165 Loss_G: 2.3034 D(x): 0.7134 D(G(z)): 0.5401 / 0.4996\n",
"[92/100][80/391] Loss_D: 2.5184 Loss_G: 2.9592 D(x): 0.7424 D(G(z)): 0.3334 / 0.4168\n",
"[92/100][81/391] Loss_D: 3.3161 Loss_G: 3.0508 D(x): 0.6384 D(G(z)): 0.5275 / 0.4028\n",
"[92/100][82/391] Loss_D: 2.8357 Loss_G: 2.3486 D(x): 0.7308 D(G(z)): 0.4471 / 0.4855\n",
"[92/100][83/391] Loss_D: 2.8031 Loss_G: 3.8475 D(x): 0.6837 D(G(z)): 0.4097 / 0.3109\n",
"[92/100][84/391] Loss_D: 2.4894 Loss_G: 3.5084 D(x): 0.6454 D(G(z)): 0.3412 / 0.3375\n",
"[92/100][85/391] Loss_D: 2.8441 Loss_G: 3.5589 D(x): 0.6320 D(G(z)): 0.3914 / 0.3312\n",
"[92/100][86/391] Loss_D: 2.4100 Loss_G: 2.7859 D(x): 0.7240 D(G(z)): 0.3015 / 0.4306\n",
"[92/100][87/391] Loss_D: 2.8854 Loss_G: 2.4162 D(x): 0.6191 D(G(z)): 0.3005 / 0.4618\n",
"[92/100][88/391] Loss_D: 2.8419 Loss_G: 2.4492 D(x): 0.6339 D(G(z)): 0.4020 / 0.4726\n",
"[92/100][89/391] Loss_D: 2.6460 Loss_G: 3.1456 D(x): 0.6994 D(G(z)): 0.4207 / 0.3938\n",
"[92/100][90/391] Loss_D: 2.9233 Loss_G: 2.7171 D(x): 0.6922 D(G(z)): 0.5013 / 0.4458\n",
"[92/100][91/391] Loss_D: 3.6112 Loss_G: 2.4279 D(x): 0.7322 D(G(z)): 0.4064 / 0.4680\n",
"[92/100][92/391] Loss_D: 2.9515 Loss_G: 2.8285 D(x): 0.6154 D(G(z)): 0.4380 / 0.4303\n",
"[92/100][93/391] Loss_D: 3.8142 Loss_G: 3.1823 D(x): 0.5203 D(G(z)): 0.4561 / 0.3600\n",
"[92/100][94/391] Loss_D: 3.0450 Loss_G: 2.5339 D(x): 0.6995 D(G(z)): 0.5135 / 0.4521\n",
"[92/100][95/391] Loss_D: 3.0560 Loss_G: 2.8964 D(x): 0.7168 D(G(z)): 0.4992 / 0.4251\n",
"[92/100][96/391] Loss_D: 2.8809 Loss_G: 2.4987 D(x): 0.7697 D(G(z)): 0.5159 / 0.4674\n",
"[92/100][97/391] Loss_D: 2.9943 Loss_G: 2.9025 D(x): 0.7200 D(G(z)): 0.4852 / 0.4003\n",
"[92/100][98/391] Loss_D: 3.1228 Loss_G: 3.6296 D(x): 0.6294 D(G(z)): 0.4964 / 0.3342\n",
"[92/100][99/391] Loss_D: 4.0728 Loss_G: 3.1069 D(x): 0.4188 D(G(z)): 0.3980 / 0.3910\n",
"[92/100][100/391] Loss_D: 2.7342 Loss_G: 3.7824 D(x): 0.6426 D(G(z)): 0.3864 / 0.3257\n",
"[92/100][101/391] Loss_D: 2.9007 Loss_G: 2.9416 D(x): 0.6807 D(G(z)): 0.4562 / 0.4203\n",
"[92/100][102/391] Loss_D: 3.3650 Loss_G: 1.9830 D(x): 0.7157 D(G(z)): 0.5663 / 0.5448\n",
"[92/100][103/391] Loss_D: 2.7574 Loss_G: 2.5826 D(x): 0.6520 D(G(z)): 0.3709 / 0.4657\n",
"[92/100][104/391] Loss_D: 2.3736 Loss_G: 3.7445 D(x): 0.6965 D(G(z)): 0.3266 / 0.3204\n",
"[92/100][105/391] Loss_D: 2.7158 Loss_G: 2.9482 D(x): 0.6633 D(G(z)): 0.3835 / 0.3961\n",
"[92/100][106/391] Loss_D: 3.2665 Loss_G: 2.3887 D(x): 0.6822 D(G(z)): 0.5441 / 0.4919\n",
"[92/100][107/391] Loss_D: 2.5787 Loss_G: 2.5446 D(x): 0.7114 D(G(z)): 0.3469 / 0.4571\n",
"[92/100][108/391] Loss_D: 2.8326 Loss_G: 2.8208 D(x): 0.6720 D(G(z)): 0.4325 / 0.4131\n",
"[92/100][109/391] Loss_D: 2.7144 Loss_G: 2.3435 D(x): 0.6402 D(G(z)): 0.4052 / 0.4855\n",
"[92/100][110/391] Loss_D: 2.2958 Loss_G: 2.5858 D(x): 0.7594 D(G(z)): 0.3595 / 0.4509\n",
"[92/100][111/391] Loss_D: 2.8403 Loss_G: 2.3308 D(x): 0.7652 D(G(z)): 0.4880 / 0.4847\n",
"[92/100][112/391] Loss_D: 3.0922 Loss_G: 2.7355 D(x): 0.6369 D(G(z)): 0.4747 / 0.4313\n",
"[92/100][113/391] Loss_D: 2.6233 Loss_G: 2.9854 D(x): 0.6458 D(G(z)): 0.3529 / 0.4031\n",
"[92/100][114/391] Loss_D: 2.3340 Loss_G: 3.1479 D(x): 0.7356 D(G(z)): 0.4312 / 0.3997\n",
"[92/100][115/391] Loss_D: 3.0157 Loss_G: 3.3599 D(x): 0.6058 D(G(z)): 0.3831 / 0.3658\n",
"[92/100][116/391] Loss_D: 3.1611 Loss_G: 3.1211 D(x): 0.5385 D(G(z)): 0.3556 / 0.3830\n",
"[92/100][117/391] Loss_D: 3.3172 Loss_G: 2.8834 D(x): 0.7202 D(G(z)): 0.5411 / 0.4186\n",
"[92/100][118/391] Loss_D: 3.1163 Loss_G: 2.7516 D(x): 0.5922 D(G(z)): 0.4071 / 0.4227\n",
"[92/100][119/391] Loss_D: 2.9879 Loss_G: 3.3467 D(x): 0.6240 D(G(z)): 0.4183 / 0.3577\n",
"[92/100][120/391] Loss_D: 2.9665 Loss_G: 2.5066 D(x): 0.6917 D(G(z)): 0.4843 / 0.4539\n",
"[92/100][121/391] Loss_D: 3.5027 Loss_G: 2.5597 D(x): 0.6942 D(G(z)): 0.3768 / 0.4666\n",
"[92/100][122/391] Loss_D: 3.2910 Loss_G: 3.0197 D(x): 0.7239 D(G(z)): 0.5617 / 0.3819\n",
"[92/100][123/391] Loss_D: 2.5145 Loss_G: 3.3148 D(x): 0.7119 D(G(z)): 0.3399 / 0.3659\n",
"[92/100][124/391] Loss_D: 3.4629 Loss_G: 2.4977 D(x): 0.5655 D(G(z)): 0.4662 / 0.4532\n",
"[92/100][125/391] Loss_D: 3.7968 Loss_G: 2.1239 D(x): 0.5515 D(G(z)): 0.4893 / 0.5071\n",
"[92/100][126/391] Loss_D: 2.6313 Loss_G: 3.1508 D(x): 0.6780 D(G(z)): 0.3843 / 0.3817\n",
"[92/100][127/391] Loss_D: 2.8811 Loss_G: 3.1696 D(x): 0.7047 D(G(z)): 0.4324 / 0.3731\n",
"[92/100][128/391] Loss_D: 2.3138 Loss_G: 2.5056 D(x): 0.7422 D(G(z)): 0.4193 / 0.4586\n",
"[92/100][129/391] Loss_D: 2.5646 Loss_G: 2.5198 D(x): 0.7516 D(G(z)): 0.4560 / 0.4504\n",
"[92/100][130/391] Loss_D: 2.7130 Loss_G: 2.3152 D(x): 0.7270 D(G(z)): 0.4220 / 0.4983\n",
"[92/100][131/391] Loss_D: 3.6618 Loss_G: 2.6964 D(x): 0.5656 D(G(z)): 0.5387 / 0.4425\n",
"[92/100][132/391] Loss_D: 2.8600 Loss_G: 3.3995 D(x): 0.6799 D(G(z)): 0.4376 / 0.3622\n",
"[92/100][133/391] Loss_D: 2.5283 Loss_G: 2.5244 D(x): 0.6607 D(G(z)): 0.3719 / 0.4573\n",
"[92/100][134/391] Loss_D: 3.0526 Loss_G: 3.8152 D(x): 0.6268 D(G(z)): 0.4743 / 0.3111\n",
"[92/100][135/391] Loss_D: 2.6052 Loss_G: 2.1960 D(x): 0.6662 D(G(z)): 0.3981 / 0.5055\n",
"[92/100][136/391] Loss_D: 2.8001 Loss_G: 4.2631 D(x): 0.6381 D(G(z)): 0.3791 / 0.2568\n",
"[92/100][137/391] Loss_D: 3.0383 Loss_G: 3.1151 D(x): 0.6056 D(G(z)): 0.4034 / 0.3831\n",
"[92/100][138/391] Loss_D: 2.9028 Loss_G: 3.1968 D(x): 0.6452 D(G(z)): 0.4801 / 0.3649\n",
"[92/100][139/391] Loss_D: 3.2070 Loss_G: 1.9363 D(x): 0.5891 D(G(z)): 0.3762 / 0.5575\n",
"[92/100][140/391] Loss_D: 2.8277 Loss_G: 2.3249 D(x): 0.6341 D(G(z)): 0.3985 / 0.5026\n",
"[92/100][141/391] Loss_D: 2.9040 Loss_G: 3.3130 D(x): 0.7355 D(G(z)): 0.4758 / 0.3539\n",
"[92/100][142/391] Loss_D: 2.8554 Loss_G: 2.4417 D(x): 0.6280 D(G(z)): 0.4225 / 0.4796\n",
"[92/100][143/391] Loss_D: 2.9743 Loss_G: 2.5440 D(x): 0.6655 D(G(z)): 0.4312 / 0.4516\n",
"[92/100][144/391] Loss_D: 2.8818 Loss_G: 2.3447 D(x): 0.6227 D(G(z)): 0.4250 / 0.4947\n",
"[92/100][145/391] Loss_D: 2.7018 Loss_G: 2.5234 D(x): 0.6621 D(G(z)): 0.3409 / 0.4702\n",
"[92/100][146/391] Loss_D: 2.8449 Loss_G: 2.2770 D(x): 0.7248 D(G(z)): 0.4625 / 0.4873\n",
"[92/100][147/391] Loss_D: 2.8351 Loss_G: 2.3435 D(x): 0.6823 D(G(z)): 0.4158 / 0.4819\n",
"[92/100][148/391] Loss_D: 2.7454 Loss_G: 2.9940 D(x): 0.6642 D(G(z)): 0.4696 / 0.3943\n",
"[92/100][149/391] Loss_D: 2.4448 Loss_G: 3.9694 D(x): 0.7513 D(G(z)): 0.3997 / 0.2832\n",
"[92/100][150/391] Loss_D: 2.9734 Loss_G: 2.7787 D(x): 0.6771 D(G(z)): 0.4668 / 0.4162\n",
"[92/100][151/391] Loss_D: 3.6294 Loss_G: 2.6181 D(x): 0.6116 D(G(z)): 0.4821 / 0.4533\n",
"[92/100][152/391] Loss_D: 2.9378 Loss_G: 2.3340 D(x): 0.6481 D(G(z)): 0.4769 / 0.4827\n",
"[92/100][153/391] Loss_D: 2.8141 Loss_G: 3.2083 D(x): 0.6866 D(G(z)): 0.4104 / 0.3766\n",
"[92/100][154/391] Loss_D: 2.5933 Loss_G: 2.3004 D(x): 0.7120 D(G(z)): 0.4247 / 0.4954\n",
"[92/100][155/391] Loss_D: 3.0855 Loss_G: 2.8487 D(x): 0.6744 D(G(z)): 0.4760 / 0.4022\n",
"[92/100][156/391] Loss_D: 3.1129 Loss_G: 3.8174 D(x): 0.5598 D(G(z)): 0.4177 / 0.3087\n",
"[92/100][157/391] Loss_D: 3.1183 Loss_G: 3.4938 D(x): 0.6146 D(G(z)): 0.4486 / 0.3354\n",
"[92/100][158/391] Loss_D: 2.9266 Loss_G: 2.8497 D(x): 0.6104 D(G(z)): 0.3939 / 0.4293\n",
"[92/100][159/391] Loss_D: 2.7927 Loss_G: 2.8771 D(x): 0.6712 D(G(z)): 0.4019 / 0.4139\n",
"[92/100][160/391] Loss_D: 2.6344 Loss_G: 2.1739 D(x): 0.6421 D(G(z)): 0.3092 / 0.5080\n",
"[92/100][161/391] Loss_D: 2.7352 Loss_G: 1.8870 D(x): 0.6763 D(G(z)): 0.4047 / 0.5639\n",
"[92/100][162/391] Loss_D: 2.8841 Loss_G: 2.0607 D(x): 0.6974 D(G(z)): 0.4704 / 0.5385\n",
"[92/100][163/391] Loss_D: 3.2116 Loss_G: 2.2922 D(x): 0.6442 D(G(z)): 0.5051 / 0.4990\n",
"[92/100][164/391] Loss_D: 2.4353 Loss_G: 3.6368 D(x): 0.7414 D(G(z)): 0.4292 / 0.3427\n",
"[92/100][165/391] Loss_D: 2.4085 Loss_G: 2.1632 D(x): 0.7412 D(G(z)): 0.4136 / 0.5294\n",
"[92/100][166/391] Loss_D: 2.3346 Loss_G: 2.2919 D(x): 0.7925 D(G(z)): 0.3918 / 0.4998\n",
"[92/100][167/391] Loss_D: 3.2258 Loss_G: 3.0832 D(x): 0.5763 D(G(z)): 0.4125 / 0.3888\n",
"[92/100][168/391] Loss_D: 2.4394 Loss_G: 2.8877 D(x): 0.7705 D(G(z)): 0.4141 / 0.4158\n",
"[92/100][169/391] Loss_D: 2.6435 Loss_G: 2.5691 D(x): 0.6477 D(G(z)): 0.3464 / 0.4574\n",
"[92/100][170/391] Loss_D: 2.7081 Loss_G: 2.2534 D(x): 0.6596 D(G(z)): 0.3920 / 0.5125\n",
"[92/100][171/391] Loss_D: 2.8070 Loss_G: 2.9931 D(x): 0.7306 D(G(z)): 0.4731 / 0.4016\n",
"[92/100][172/391] Loss_D: 2.6868 Loss_G: 2.6237 D(x): 0.6774 D(G(z)): 0.4077 / 0.4448\n",
"[92/100][173/391] Loss_D: 2.8282 Loss_G: 2.5725 D(x): 0.7060 D(G(z)): 0.4371 / 0.4537\n",
"[92/100][174/391] Loss_D: 3.4230 Loss_G: 3.3610 D(x): 0.5695 D(G(z)): 0.4950 / 0.3571\n",
"[92/100][175/391] Loss_D: 3.6310 Loss_G: 2.6081 D(x): 0.5074 D(G(z)): 0.4304 / 0.4396\n",
"[92/100][176/391] Loss_D: 2.7488 Loss_G: 3.4643 D(x): 0.7429 D(G(z)): 0.4459 / 0.3377\n",
"[92/100][177/391] Loss_D: 2.5814 Loss_G: 3.1174 D(x): 0.7442 D(G(z)): 0.3777 / 0.3885\n",
"[92/100][178/391] Loss_D: 2.9502 Loss_G: 2.2906 D(x): 0.6042 D(G(z)): 0.3811 / 0.4794\n",
"[92/100][179/391] Loss_D: 3.8164 Loss_G: 2.9711 D(x): 0.5522 D(G(z)): 0.5093 / 0.3985\n",
"[92/100][180/391] Loss_D: 2.9717 Loss_G: 2.2589 D(x): 0.6434 D(G(z)): 0.4311 / 0.4967\n",
"[92/100][181/391] Loss_D: 3.4851 Loss_G: 2.2677 D(x): 0.6387 D(G(z)): 0.4236 / 0.4991\n",
"[92/100][182/391] Loss_D: 3.3977 Loss_G: 2.0745 D(x): 0.6438 D(G(z)): 0.5543 / 0.5389\n",
"[92/100][183/391] Loss_D: 2.5721 Loss_G: 2.4543 D(x): 0.6769 D(G(z)): 0.4254 / 0.4483\n",
"[92/100][184/391] Loss_D: 2.7212 Loss_G: 3.3182 D(x): 0.7458 D(G(z)): 0.4696 / 0.3451\n",
"[92/100][185/391] Loss_D: 3.0739 Loss_G: 3.2374 D(x): 0.6978 D(G(z)): 0.4984 / 0.3534\n",
"[92/100][186/391] Loss_D: 2.9310 Loss_G: 3.6586 D(x): 0.5962 D(G(z)): 0.3883 / 0.3268\n",
"[92/100][187/391] Loss_D: 2.7107 Loss_G: 2.9618 D(x): 0.7539 D(G(z)): 0.4599 / 0.3928\n",
"[92/100][188/391] Loss_D: 2.7585 Loss_G: 3.4430 D(x): 0.6375 D(G(z)): 0.3424 / 0.3423\n",
"[92/100][189/391] Loss_D: 2.9367 Loss_G: 2.6974 D(x): 0.6241 D(G(z)): 0.4089 / 0.4389\n",
"[92/100][190/391] Loss_D: 2.9708 Loss_G: 2.8033 D(x): 0.5945 D(G(z)): 0.3930 / 0.4384\n",
"[92/100][191/391] Loss_D: 2.4388 Loss_G: 3.1682 D(x): 0.7765 D(G(z)): 0.3879 / 0.3811\n",
"[92/100][192/391] Loss_D: 2.7407 Loss_G: 2.1749 D(x): 0.7375 D(G(z)): 0.4835 / 0.5239\n",
"[92/100][193/391] Loss_D: 3.0147 Loss_G: 3.2119 D(x): 0.7180 D(G(z)): 0.5374 / 0.3762\n",
"[92/100][194/391] Loss_D: 3.6156 Loss_G: 2.8643 D(x): 0.4973 D(G(z)): 0.4059 / 0.4316\n",
"[92/100][195/391] Loss_D: 3.1666 Loss_G: 2.5184 D(x): 0.5281 D(G(z)): 0.3670 / 0.4641\n",
"[92/100][196/391] Loss_D: 2.8149 Loss_G: 2.8653 D(x): 0.5772 D(G(z)): 0.4003 / 0.4186\n",
"[92/100][197/391] Loss_D: 2.6080 Loss_G: 2.2131 D(x): 0.7110 D(G(z)): 0.3426 / 0.5196\n",
"[92/100][198/391] Loss_D: 2.1307 Loss_G: 2.2829 D(x): 0.8020 D(G(z)): 0.3877 / 0.4993\n",
"[92/100][199/391] Loss_D: 2.7597 Loss_G: 2.5377 D(x): 0.7018 D(G(z)): 0.4708 / 0.4655\n",
"[92/100][200/391] Loss_D: 3.0672 Loss_G: 2.4492 D(x): 0.7216 D(G(z)): 0.5092 / 0.4726\n",
"[92/100][201/391] Loss_D: 2.4806 Loss_G: 2.4994 D(x): 0.7955 D(G(z)): 0.3812 / 0.4684\n",
"[92/100][202/391] Loss_D: 3.0725 Loss_G: 2.0583 D(x): 0.6115 D(G(z)): 0.4538 / 0.5294\n",
"[92/100][203/391] Loss_D: 2.6165 Loss_G: 3.9327 D(x): 0.7491 D(G(z)): 0.4347 / 0.2973\n",
"[92/100][204/391] Loss_D: 2.2516 Loss_G: 2.8296 D(x): 0.7195 D(G(z)): 0.3859 / 0.4406\n",
"[92/100][205/391] Loss_D: 3.0879 Loss_G: 3.0728 D(x): 0.5676 D(G(z)): 0.4132 / 0.3957\n",
"[92/100][206/391] Loss_D: 3.1717 Loss_G: 2.7392 D(x): 0.5872 D(G(z)): 0.4392 / 0.4249\n",
"[92/100][207/391] Loss_D: 3.0141 Loss_G: 3.0559 D(x): 0.6131 D(G(z)): 0.3922 / 0.3863\n",
"[92/100][208/391] Loss_D: 2.6050 Loss_G: 1.9673 D(x): 0.6573 D(G(z)): 0.3522 / 0.5294\n",
"[92/100][209/391] Loss_D: 2.9480 Loss_G: 2.4241 D(x): 0.6333 D(G(z)): 0.4402 / 0.4787\n",
"[92/100][210/391] Loss_D: 3.0734 Loss_G: 2.4125 D(x): 0.7568 D(G(z)): 0.5436 / 0.4791\n",
"[92/100][211/391] Loss_D: 3.7038 Loss_G: 3.0040 D(x): 0.7073 D(G(z)): 0.5018 / 0.4156\n",
"[92/100][212/391] Loss_D: 3.9284 Loss_G: 3.1312 D(x): 0.6788 D(G(z)): 0.6328 / 0.3787\n",
"[92/100][213/391] Loss_D: 2.5939 Loss_G: 2.8767 D(x): 0.7242 D(G(z)): 0.4031 / 0.4206\n",
"[92/100][214/391] Loss_D: 2.6641 Loss_G: 2.0894 D(x): 0.5624 D(G(z)): 0.2890 / 0.5366\n",
"[92/100][215/391] Loss_D: 3.1471 Loss_G: 2.8900 D(x): 0.5685 D(G(z)): 0.3970 / 0.4056\n",
"[92/100][216/391] Loss_D: 2.5985 Loss_G: 2.7854 D(x): 0.6913 D(G(z)): 0.3552 / 0.4194\n",
"[92/100][217/391] Loss_D: 2.8411 Loss_G: 2.7245 D(x): 0.6854 D(G(z)): 0.4387 / 0.4270\n",
"[92/100][218/391] Loss_D: 2.3930 Loss_G: 2.5750 D(x): 0.6976 D(G(z)): 0.3246 / 0.4452\n",
"[92/100][219/391] Loss_D: 3.0546 Loss_G: 2.7335 D(x): 0.6675 D(G(z)): 0.4968 / 0.4303\n",
"[92/100][220/391] Loss_D: 2.6208 Loss_G: 2.8230 D(x): 0.6264 D(G(z)): 0.2882 / 0.4350\n",
"[92/100][221/391] Loss_D: 3.5044 Loss_G: 3.6581 D(x): 0.6454 D(G(z)): 0.5700 / 0.3235\n",
"[92/100][222/391] Loss_D: 3.1523 Loss_G: 2.6190 D(x): 0.6195 D(G(z)): 0.4752 / 0.4566\n",
"[92/100][223/391] Loss_D: 3.3139 Loss_G: 3.2228 D(x): 0.6132 D(G(z)): 0.4793 / 0.3696\n",
"[92/100][224/391] Loss_D: 3.0775 Loss_G: 2.4362 D(x): 0.6231 D(G(z)): 0.4955 / 0.4788\n",
"[92/100][225/391] Loss_D: 3.2762 Loss_G: 2.7878 D(x): 0.6005 D(G(z)): 0.4584 / 0.4352\n",
"[92/100][226/391] Loss_D: 2.7041 Loss_G: 2.7805 D(x): 0.6476 D(G(z)): 0.4076 / 0.4235\n",
"[92/100][227/391] Loss_D: 2.6136 Loss_G: 3.1265 D(x): 0.7005 D(G(z)): 0.3834 / 0.3798\n",
"[92/100][228/391] Loss_D: 2.5702 Loss_G: 2.6957 D(x): 0.6806 D(G(z)): 0.3686 / 0.4457\n",
"[92/100][229/391] Loss_D: 2.8911 Loss_G: 2.5813 D(x): 0.6124 D(G(z)): 0.3883 / 0.4458\n",
"[92/100][230/391] Loss_D: 3.2063 Loss_G: 2.1197 D(x): 0.7154 D(G(z)): 0.6072 / 0.5195\n",
"[92/100][231/391] Loss_D: 2.8071 Loss_G: 2.4559 D(x): 0.6806 D(G(z)): 0.4325 / 0.4710\n",
"[92/100][232/391] Loss_D: 3.0468 Loss_G: 3.2136 D(x): 0.7073 D(G(z)): 0.5160 / 0.3860\n",
"[92/100][233/391] Loss_D: 3.2233 Loss_G: 2.1138 D(x): 0.6605 D(G(z)): 0.5319 / 0.5221\n",
"[92/100][234/391] Loss_D: 2.7098 Loss_G: 3.3023 D(x): 0.6026 D(G(z)): 0.3638 / 0.3615\n",
"[92/100][235/391] Loss_D: 2.8221 Loss_G: 2.6815 D(x): 0.5777 D(G(z)): 0.3411 / 0.4388\n",
"[92/100][236/391] Loss_D: 2.4516 Loss_G: 3.1562 D(x): 0.7393 D(G(z)): 0.3703 / 0.3676\n",
"[92/100][237/391] Loss_D: 2.5795 Loss_G: 2.8379 D(x): 0.7061 D(G(z)): 0.4001 / 0.4254\n",
"[92/100][238/391] Loss_D: 2.9963 Loss_G: 2.6880 D(x): 0.6001 D(G(z)): 0.3984 / 0.4415\n",
"[92/100][239/391] Loss_D: 2.8310 Loss_G: 2.5928 D(x): 0.7128 D(G(z)): 0.4862 / 0.4580\n",
"[92/100][240/391] Loss_D: 3.1080 Loss_G: 3.8021 D(x): 0.6559 D(G(z)): 0.4779 / 0.3138\n",
"[92/100][241/391] Loss_D: 3.8456 Loss_G: 3.1903 D(x): 0.6418 D(G(z)): 0.3411 / 0.3831\n",
"[92/100][242/391] Loss_D: 3.1194 Loss_G: 3.2832 D(x): 0.6455 D(G(z)): 0.5077 / 0.3641\n",
"[92/100][243/391] Loss_D: 2.8952 Loss_G: 2.8600 D(x): 0.5812 D(G(z)): 0.3281 / 0.4060\n",
"[92/100][244/391] Loss_D: 2.9696 Loss_G: 2.1221 D(x): 0.6059 D(G(z)): 0.3924 / 0.5190\n",
"[92/100][245/391] Loss_D: 2.6626 Loss_G: 2.6083 D(x): 0.6207 D(G(z)): 0.3305 / 0.4377\n",
"[92/100][246/391] Loss_D: 3.0313 Loss_G: 1.9603 D(x): 0.7746 D(G(z)): 0.5100 / 0.5355\n",
"[92/100][247/391] Loss_D: 3.2312 Loss_G: 2.1641 D(x): 0.6453 D(G(z)): 0.4964 / 0.4927\n",
"[92/100][248/391] Loss_D: 2.4088 Loss_G: 2.6739 D(x): 0.7384 D(G(z)): 0.4011 / 0.4368\n",
"[92/100][249/391] Loss_D: 2.6806 Loss_G: 3.5228 D(x): 0.6898 D(G(z)): 0.4320 / 0.3511\n",
"[92/100][250/391] Loss_D: 3.1670 Loss_G: 1.7577 D(x): 0.6245 D(G(z)): 0.4470 / 0.5946\n",
"[92/100][251/391] Loss_D: 2.2301 Loss_G: 3.0468 D(x): 0.7807 D(G(z)): 0.3229 / 0.3899\n",
"[92/100][252/391] Loss_D: 3.3774 Loss_G: 2.9304 D(x): 0.6598 D(G(z)): 0.5249 / 0.4160\n",
"[92/100][253/391] Loss_D: 3.0612 Loss_G: 2.8751 D(x): 0.6144 D(G(z)): 0.4378 / 0.4279\n",
"[92/100][254/391] Loss_D: 2.5177 Loss_G: 2.2553 D(x): 0.6785 D(G(z)): 0.4029 / 0.4930\n",
"[92/100][255/391] Loss_D: 2.4001 Loss_G: 2.8704 D(x): 0.7130 D(G(z)): 0.3498 / 0.4081\n",
"[92/100][256/391] Loss_D: 2.7036 Loss_G: 2.6368 D(x): 0.6757 D(G(z)): 0.4039 / 0.4401\n",
"[92/100][257/391] Loss_D: 3.5628 Loss_G: 2.4435 D(x): 0.5558 D(G(z)): 0.4703 / 0.4713\n",
"[92/100][258/391] Loss_D: 3.7393 Loss_G: 2.8014 D(x): 0.5862 D(G(z)): 0.5563 / 0.4177\n",
"[92/100][259/391] Loss_D: 2.8512 Loss_G: 2.8173 D(x): 0.7100 D(G(z)): 0.4766 / 0.4142\n",
"[92/100][260/391] Loss_D: 3.9772 Loss_G: 2.8200 D(x): 0.5826 D(G(z)): 0.5985 / 0.4155\n",
"[92/100][261/391] Loss_D: 3.2612 Loss_G: 2.6595 D(x): 0.6400 D(G(z)): 0.4760 / 0.4640\n",
"[92/100][262/391] Loss_D: 3.0797 Loss_G: 2.7780 D(x): 0.6855 D(G(z)): 0.4709 / 0.4323\n",
"[92/100][263/391] Loss_D: 2.8179 Loss_G: 2.6398 D(x): 0.6896 D(G(z)): 0.3869 / 0.4412\n",
"[92/100][264/391] Loss_D: 2.7633 Loss_G: 3.7927 D(x): 0.5748 D(G(z)): 0.3279 / 0.3157\n",
"[92/100][265/391] Loss_D: 3.1778 Loss_G: 3.2423 D(x): 0.6316 D(G(z)): 0.4550 / 0.3676\n",
"[92/100][266/391] Loss_D: 2.7288 Loss_G: 2.4428 D(x): 0.6900 D(G(z)): 0.3680 / 0.4658\n",
"[92/100][267/391] Loss_D: 2.8067 Loss_G: 2.2433 D(x): 0.7144 D(G(z)): 0.4599 / 0.4984\n",
"[92/100][268/391] Loss_D: 2.2654 Loss_G: 3.5618 D(x): 0.7358 D(G(z)): 0.4111 / 0.3440\n",
"[92/100][269/391] Loss_D: 3.0277 Loss_G: 3.1047 D(x): 0.7739 D(G(z)): 0.5389 / 0.3879\n",
"[92/100][270/391] Loss_D: 2.7269 Loss_G: 2.8440 D(x): 0.7177 D(G(z)): 0.4337 / 0.4262\n",
"[92/100][271/391] Loss_D: 3.6363 Loss_G: 3.9933 D(x): 0.6766 D(G(z)): 0.4373 / 0.3056\n",
"[92/100][272/391] Loss_D: 2.8385 Loss_G: 3.6618 D(x): 0.6243 D(G(z)): 0.3861 / 0.3268\n",
"[92/100][273/391] Loss_D: 2.8220 Loss_G: 3.1899 D(x): 0.6199 D(G(z)): 0.3834 / 0.3733\n",
"[92/100][274/391] Loss_D: 2.7392 Loss_G: 2.9986 D(x): 0.6035 D(G(z)): 0.3190 / 0.3935\n",
"[92/100][275/391] Loss_D: 2.8408 Loss_G: 3.2532 D(x): 0.6470 D(G(z)): 0.3885 / 0.3825\n",
"[92/100][276/391] Loss_D: 2.6541 Loss_G: 3.1687 D(x): 0.7144 D(G(z)): 0.3835 / 0.3838\n",
"[92/100][277/391] Loss_D: 2.5443 Loss_G: 2.7258 D(x): 0.7259 D(G(z)): 0.3669 / 0.4267\n",
"[92/100][278/391] Loss_D: 2.5309 Loss_G: 2.9609 D(x): 0.6714 D(G(z)): 0.4102 / 0.4166\n",
"[92/100][279/391] Loss_D: 2.6025 Loss_G: 2.1355 D(x): 0.6874 D(G(z)): 0.3644 / 0.5076\n",
"[92/100][280/391] Loss_D: 2.7391 Loss_G: 2.4317 D(x): 0.7092 D(G(z)): 0.4577 / 0.4823\n",
"[92/100][281/391] Loss_D: 3.4723 Loss_G: 2.2010 D(x): 0.5635 D(G(z)): 0.4771 / 0.5125\n",
"[92/100][282/391] Loss_D: 2.6924 Loss_G: 2.7122 D(x): 0.6711 D(G(z)): 0.3821 / 0.4357\n",
"[92/100][283/391] Loss_D: 2.3853 Loss_G: 2.3291 D(x): 0.7581 D(G(z)): 0.3911 / 0.4887\n",
"[92/100][284/391] Loss_D: 2.5810 Loss_G: 2.5826 D(x): 0.6881 D(G(z)): 0.4302 / 0.4510\n",
"[92/100][285/391] Loss_D: 3.5605 Loss_G: 3.1787 D(x): 0.6745 D(G(z)): 0.6032 / 0.3716\n",
"[92/100][286/391] Loss_D: 3.3146 Loss_G: 2.7865 D(x): 0.6319 D(G(z)): 0.5299 / 0.4356\n",
"[92/100][287/391] Loss_D: 3.0214 Loss_G: 3.8296 D(x): 0.6620 D(G(z)): 0.4605 / 0.2949\n",
"[92/100][288/391] Loss_D: 2.7762 Loss_G: 2.9538 D(x): 0.6232 D(G(z)): 0.4128 / 0.4041\n",
"[92/100][289/391] Loss_D: 2.3927 Loss_G: 2.3361 D(x): 0.7224 D(G(z)): 0.3472 / 0.5021\n",
"[92/100][290/391] Loss_D: 3.0587 Loss_G: 2.6037 D(x): 0.5811 D(G(z)): 0.3294 / 0.4544\n",
"[92/100][291/391] Loss_D: 3.3156 Loss_G: 2.6866 D(x): 0.5865 D(G(z)): 0.4487 / 0.4310\n",
"[92/100][292/391] Loss_D: 3.3551 Loss_G: 2.6780 D(x): 0.6281 D(G(z)): 0.4845 / 0.4457\n",
"[92/100][293/391] Loss_D: 2.5747 Loss_G: 3.3100 D(x): 0.7543 D(G(z)): 0.4373 / 0.3587\n",
"[92/100][294/391] Loss_D: 2.4208 Loss_G: 2.1847 D(x): 0.7285 D(G(z)): 0.4128 / 0.4898\n",
"[92/100][295/391] Loss_D: 3.5569 Loss_G: 2.5811 D(x): 0.5843 D(G(z)): 0.5198 / 0.4431\n",
"[92/100][296/391] Loss_D: 2.9942 Loss_G: 2.9492 D(x): 0.6368 D(G(z)): 0.4357 / 0.3878\n",
"[92/100][297/391] Loss_D: 3.0854 Loss_G: 3.0859 D(x): 0.6365 D(G(z)): 0.4315 / 0.3822\n",
"[92/100][298/391] Loss_D: 2.2843 Loss_G: 2.4120 D(x): 0.7342 D(G(z)): 0.3415 / 0.4638\n",
"[92/100][299/391] Loss_D: 3.2426 Loss_G: 2.6125 D(x): 0.6142 D(G(z)): 0.4794 / 0.4488\n",
"[92/100][300/391] Loss_D: 3.1327 Loss_G: 2.4827 D(x): 0.5646 D(G(z)): 0.3751 / 0.4728\n",
"[92/100][301/391] Loss_D: 3.6833 Loss_G: 2.4902 D(x): 0.7532 D(G(z)): 0.4131 / 0.4547\n",
"[92/100][302/391] Loss_D: 2.4961 Loss_G: 1.9194 D(x): 0.7200 D(G(z)): 0.4147 / 0.5650\n",
"[92/100][303/391] Loss_D: 2.5572 Loss_G: 3.7605 D(x): 0.7067 D(G(z)): 0.3971 / 0.3253\n",
"[92/100][304/391] Loss_D: 2.2052 Loss_G: 2.8801 D(x): 0.7108 D(G(z)): 0.3616 / 0.4089\n",
"[92/100][305/391] Loss_D: 2.9201 Loss_G: 3.0155 D(x): 0.6149 D(G(z)): 0.3962 / 0.3861\n",
"[92/100][306/391] Loss_D: 3.1605 Loss_G: 2.6754 D(x): 0.5901 D(G(z)): 0.4345 / 0.4403\n",
"[92/100][307/391] Loss_D: 2.8634 Loss_G: 2.2460 D(x): 0.6930 D(G(z)): 0.4615 / 0.4860\n",
"[92/100][308/391] Loss_D: 2.6484 Loss_G: 3.9562 D(x): 0.6625 D(G(z)): 0.4045 / 0.3030\n",
"[92/100][309/391] Loss_D: 2.9253 Loss_G: 2.7004 D(x): 0.6973 D(G(z)): 0.4898 / 0.4458\n",
"[92/100][310/391] Loss_D: 3.1251 Loss_G: 2.4097 D(x): 0.6276 D(G(z)): 0.4264 / 0.4793\n",
"[92/100][311/391] Loss_D: 3.7304 Loss_G: 2.3203 D(x): 0.7007 D(G(z)): 0.5972 / 0.4918\n",
"[92/100][312/391] Loss_D: 3.0908 Loss_G: 3.1637 D(x): 0.6852 D(G(z)): 0.4954 / 0.3800\n",
"[92/100][313/391] Loss_D: 3.2617 Loss_G: 3.6545 D(x): 0.5584 D(G(z)): 0.4154 / 0.3297\n",
"[92/100][314/391] Loss_D: 3.4669 Loss_G: 3.2776 D(x): 0.5989 D(G(z)): 0.5386 / 0.3609\n",
"[92/100][315/391] Loss_D: 2.9631 Loss_G: 3.3274 D(x): 0.6943 D(G(z)): 0.4912 / 0.3594\n",
"[92/100][316/391] Loss_D: 2.6371 Loss_G: 2.4518 D(x): 0.6286 D(G(z)): 0.2883 / 0.4621\n",
"[92/100][317/391] Loss_D: 2.7594 Loss_G: 2.8124 D(x): 0.7000 D(G(z)): 0.3967 / 0.4204\n",
"[92/100][318/391] Loss_D: 2.8809 Loss_G: 2.8114 D(x): 0.6138 D(G(z)): 0.4368 / 0.4192\n",
"[92/100][319/391] Loss_D: 2.7207 Loss_G: 2.8576 D(x): 0.7304 D(G(z)): 0.4649 / 0.4154\n",
"[92/100][320/391] Loss_D: 3.2123 Loss_G: 2.6877 D(x): 0.5413 D(G(z)): 0.3019 / 0.4225\n",
"[92/100][321/391] Loss_D: 2.6808 Loss_G: 2.7469 D(x): 0.7459 D(G(z)): 0.4203 / 0.4262\n",
"[92/100][322/391] Loss_D: 2.6421 Loss_G: 3.0552 D(x): 0.6519 D(G(z)): 0.3760 / 0.3910\n",
"[92/100][323/391] Loss_D: 3.0444 Loss_G: 2.6252 D(x): 0.7164 D(G(z)): 0.5077 / 0.4526\n",
"[92/100][324/391] Loss_D: 2.8859 Loss_G: 2.4107 D(x): 0.6730 D(G(z)): 0.4872 / 0.4833\n",
"[92/100][325/391] Loss_D: 2.7122 Loss_G: 3.0441 D(x): 0.6897 D(G(z)): 0.3723 / 0.3981\n",
"[92/100][326/391] Loss_D: 2.7978 Loss_G: 3.6509 D(x): 0.6314 D(G(z)): 0.3792 / 0.3236\n",
"[92/100][327/391] Loss_D: 3.1913 Loss_G: 2.7681 D(x): 0.5882 D(G(z)): 0.4400 / 0.4146\n",
"[92/100][328/391] Loss_D: 2.5441 Loss_G: 2.4718 D(x): 0.7027 D(G(z)): 0.4122 / 0.4524\n",
"[92/100][329/391] Loss_D: 2.3959 Loss_G: 2.3933 D(x): 0.8014 D(G(z)): 0.4100 / 0.4870\n",
"[92/100][330/391] Loss_D: 2.7597 Loss_G: 2.0701 D(x): 0.6526 D(G(z)): 0.3751 / 0.5377\n",
"[92/100][331/391] Loss_D: 3.5992 Loss_G: 2.5420 D(x): 0.7557 D(G(z)): 0.3437 / 0.4636\n",
"[92/100][332/391] Loss_D: 2.4856 Loss_G: 2.2693 D(x): 0.6942 D(G(z)): 0.3944 / 0.4914\n",
"[92/100][333/391] Loss_D: 3.5841 Loss_G: 3.1813 D(x): 0.5744 D(G(z)): 0.4587 / 0.3607\n",
"[92/100][334/391] Loss_D: 2.9178 Loss_G: 2.9030 D(x): 0.6546 D(G(z)): 0.4735 / 0.4087\n",
"[92/100][335/391] Loss_D: 2.4705 Loss_G: 2.5878 D(x): 0.7066 D(G(z)): 0.3488 / 0.4538\n",
"[92/100][336/391] Loss_D: 2.9869 Loss_G: 3.6821 D(x): 0.7056 D(G(z)): 0.4618 / 0.3128\n",
"[92/100][337/391] Loss_D: 3.0174 Loss_G: 1.6217 D(x): 0.6218 D(G(z)): 0.4368 / 0.5707\n",
"[92/100][338/391] Loss_D: 3.1823 Loss_G: 2.2323 D(x): 0.5862 D(G(z)): 0.4408 / 0.5000\n",
"[92/100][339/391] Loss_D: 2.7130 Loss_G: 2.2835 D(x): 0.7331 D(G(z)): 0.4327 / 0.4931\n",
"[92/100][340/391] Loss_D: 2.7687 Loss_G: 2.1558 D(x): 0.6824 D(G(z)): 0.4151 / 0.5151\n",
"[92/100][341/391] Loss_D: 2.5581 Loss_G: 1.8404 D(x): 0.7358 D(G(z)): 0.4127 / 0.5590\n",
"[92/100][342/391] Loss_D: 2.7354 Loss_G: 2.8319 D(x): 0.7550 D(G(z)): 0.4715 / 0.4160\n",
"[92/100][343/391] Loss_D: 3.6945 Loss_G: 2.9490 D(x): 0.5760 D(G(z)): 0.5284 / 0.3988\n",
"[92/100][344/391] Loss_D: 2.5311 Loss_G: 3.4890 D(x): 0.6439 D(G(z)): 0.3497 / 0.3479\n",
"[92/100][345/391] Loss_D: 3.0505 Loss_G: 2.8324 D(x): 0.6374 D(G(z)): 0.4365 / 0.4063\n",
"[92/100][346/391] Loss_D: 2.8053 Loss_G: 2.4438 D(x): 0.6884 D(G(z)): 0.4362 / 0.4749\n",
"[92/100][347/391] Loss_D: 2.9210 Loss_G: 2.1817 D(x): 0.6793 D(G(z)): 0.4157 / 0.5035\n",
"[92/100][348/391] Loss_D: 3.6279 Loss_G: 2.9926 D(x): 0.5146 D(G(z)): 0.3890 / 0.3994\n",
"[92/100][349/391] Loss_D: 3.2608 Loss_G: 2.8130 D(x): 0.6158 D(G(z)): 0.4977 / 0.4199\n",
"[92/100][350/391] Loss_D: 2.9448 Loss_G: 2.3259 D(x): 0.7235 D(G(z)): 0.5070 / 0.4887\n",
"[92/100][351/391] Loss_D: 3.2700 Loss_G: 3.0402 D(x): 0.6838 D(G(z)): 0.5202 / 0.3979\n",
"[92/100][352/391] Loss_D: 2.5327 Loss_G: 2.4078 D(x): 0.7280 D(G(z)): 0.4383 / 0.4992\n",
"[92/100][353/391] Loss_D: 3.2423 Loss_G: 1.8370 D(x): 0.5556 D(G(z)): 0.4283 / 0.5712\n",
"[92/100][354/391] Loss_D: 2.5759 Loss_G: 2.7037 D(x): 0.6363 D(G(z)): 0.3370 / 0.4360\n",
"[92/100][355/391] Loss_D: 2.7958 Loss_G: 3.1373 D(x): 0.7053 D(G(z)): 0.4821 / 0.3799\n",
"[92/100][356/391] Loss_D: 2.1444 Loss_G: 2.9407 D(x): 0.7762 D(G(z)): 0.3119 / 0.4085\n",
"[92/100][357/391] Loss_D: 3.3831 Loss_G: 2.7071 D(x): 0.6432 D(G(z)): 0.5416 / 0.4130\n",
"[92/100][358/391] Loss_D: 2.4765 Loss_G: 2.5477 D(x): 0.6965 D(G(z)): 0.4006 / 0.4631\n",
"[92/100][359/391] Loss_D: 3.2488 Loss_G: 2.5623 D(x): 0.5750 D(G(z)): 0.4292 / 0.4722\n",
"[92/100][360/391] Loss_D: 2.8313 Loss_G: 2.4118 D(x): 0.6626 D(G(z)): 0.3995 / 0.4609\n",
"[92/100][361/391] Loss_D: 3.5324 Loss_G: 2.6729 D(x): 0.6007 D(G(z)): 0.3533 / 0.4420\n",
"[92/100][362/391] Loss_D: 3.1588 Loss_G: 2.1811 D(x): 0.6705 D(G(z)): 0.4819 / 0.5075\n",
"[92/100][363/391] Loss_D: 2.9833 Loss_G: 3.6088 D(x): 0.6404 D(G(z)): 0.4484 / 0.3353\n",
"[92/100][364/391] Loss_D: 3.6417 Loss_G: 2.7418 D(x): 0.7063 D(G(z)): 0.5957 / 0.4232\n",
"[92/100][365/391] Loss_D: 2.5910 Loss_G: 3.7382 D(x): 0.7625 D(G(z)): 0.4300 / 0.3101\n",
"[92/100][366/391] Loss_D: 2.7964 Loss_G: 3.2908 D(x): 0.6643 D(G(z)): 0.4218 / 0.3496\n",
"[92/100][367/391] Loss_D: 2.8167 Loss_G: 2.8717 D(x): 0.6208 D(G(z)): 0.3703 / 0.4225\n",
"[92/100][368/391] Loss_D: 2.7684 Loss_G: 3.6640 D(x): 0.6621 D(G(z)): 0.3841 / 0.3324\n",
"[92/100][369/391] Loss_D: 2.8763 Loss_G: 2.7972 D(x): 0.6172 D(G(z)): 0.3950 / 0.4336\n",
"[92/100][370/391] Loss_D: 2.3069 Loss_G: 2.6427 D(x): 0.6845 D(G(z)): 0.2794 / 0.4487\n",
"[92/100][371/391] Loss_D: 2.5955 Loss_G: 2.7319 D(x): 0.7724 D(G(z)): 0.4315 / 0.4295\n",
"[92/100][372/391] Loss_D: 2.8736 Loss_G: 3.1314 D(x): 0.6277 D(G(z)): 0.4264 / 0.3718\n",
"[92/100][373/391] Loss_D: 2.8625 Loss_G: 2.0493 D(x): 0.7007 D(G(z)): 0.4463 / 0.5405\n",
"[92/100][374/391] Loss_D: 2.8681 Loss_G: 2.7609 D(x): 0.7231 D(G(z)): 0.5138 / 0.4349\n",
"[92/100][375/391] Loss_D: 2.9951 Loss_G: 2.7318 D(x): 0.6482 D(G(z)): 0.4356 / 0.4297\n",
"[92/100][376/391] Loss_D: 2.8663 Loss_G: 3.2706 D(x): 0.6548 D(G(z)): 0.3732 / 0.3644\n",
"[92/100][377/391] Loss_D: 2.9895 Loss_G: 2.7980 D(x): 0.6220 D(G(z)): 0.3907 / 0.4097\n",
"[92/100][378/391] Loss_D: 2.3456 Loss_G: 2.4323 D(x): 0.7092 D(G(z)): 0.3971 / 0.4793\n",
"[92/100][379/391] Loss_D: 2.9807 Loss_G: 3.0813 D(x): 0.6765 D(G(z)): 0.5087 / 0.3959\n",
"[92/100][380/391] Loss_D: 2.6145 Loss_G: 2.9679 D(x): 0.6859 D(G(z)): 0.3605 / 0.4085\n",
"[92/100][381/391] Loss_D: 2.8307 Loss_G: 1.8864 D(x): 0.6772 D(G(z)): 0.4430 / 0.5757\n",
"[92/100][382/391] Loss_D: 3.0737 Loss_G: 2.1339 D(x): 0.6219 D(G(z)): 0.4263 / 0.5234\n",
"[92/100][383/391] Loss_D: 2.8537 Loss_G: 3.0772 D(x): 0.6279 D(G(z)): 0.3875 / 0.3813\n",
"[92/100][384/391] Loss_D: 2.6114 Loss_G: 2.8813 D(x): 0.7148 D(G(z)): 0.4752 / 0.4133\n",
"[92/100][385/391] Loss_D: 2.7375 Loss_G: 2.0734 D(x): 0.6675 D(G(z)): 0.4137 / 0.5134\n",
"[92/100][386/391] Loss_D: 3.5370 Loss_G: 2.7330 D(x): 0.6073 D(G(z)): 0.5235 / 0.4262\n",
"[92/100][387/391] Loss_D: 3.1294 Loss_G: 3.2988 D(x): 0.5885 D(G(z)): 0.4143 / 0.3540\n",
"[92/100][388/391] Loss_D: 2.3215 Loss_G: 2.9204 D(x): 0.7548 D(G(z)): 0.3959 / 0.4069\n",
"[92/100][389/391] Loss_D: 3.0585 Loss_G: 3.1019 D(x): 0.5699 D(G(z)): 0.3468 / 0.3855\n",
"[92/100][390/391] Loss_D: 2.4110 Loss_G: 1.8138 D(x): 0.6852 D(G(z)): 0.2995 / 0.5725\n",
"[92/100][391/391] Loss_D: 3.7672 Loss_G: 2.9298 D(x): 0.7336 D(G(z)): 0.3984 / 0.4176\n",
"[93/100][1/391] Loss_D: 3.7989 Loss_G: 3.6143 D(x): 0.7510 D(G(z)): 0.5175 / 0.3390\n",
"[93/100][2/391] Loss_D: 2.4459 Loss_G: 2.5135 D(x): 0.7340 D(G(z)): 0.3839 / 0.4555\n",
"[93/100][3/391] Loss_D: 2.4814 Loss_G: 3.1298 D(x): 0.7461 D(G(z)): 0.4189 / 0.4017\n",
"[93/100][4/391] Loss_D: 2.5050 Loss_G: 2.5211 D(x): 0.7047 D(G(z)): 0.4170 / 0.4642\n",
"[93/100][5/391] Loss_D: 3.0631 Loss_G: 2.5481 D(x): 0.6212 D(G(z)): 0.4395 / 0.4407\n",
"[93/100][6/391] Loss_D: 2.7081 Loss_G: 2.4634 D(x): 0.7207 D(G(z)): 0.4304 / 0.4634\n",
"[93/100][7/391] Loss_D: 2.9319 Loss_G: 2.6356 D(x): 0.6183 D(G(z)): 0.3289 / 0.4394\n",
"[93/100][8/391] Loss_D: 2.5785 Loss_G: 3.0737 D(x): 0.6290 D(G(z)): 0.3554 / 0.3994\n",
"[93/100][9/391] Loss_D: 2.9549 Loss_G: 2.5358 D(x): 0.6310 D(G(z)): 0.4740 / 0.4622\n",
"[93/100][10/391] Loss_D: 2.4333 Loss_G: 2.7271 D(x): 0.6928 D(G(z)): 0.3454 / 0.4320\n",
"[93/100][11/391] Loss_D: 3.1440 Loss_G: 2.1141 D(x): 0.6505 D(G(z)): 0.4982 / 0.5359\n",
"[93/100][12/391] Loss_D: 3.1296 Loss_G: 3.2277 D(x): 0.6289 D(G(z)): 0.4694 / 0.3760\n",
"[93/100][13/391] Loss_D: 3.4276 Loss_G: 2.4317 D(x): 0.6957 D(G(z)): 0.5682 / 0.4671\n",
"[93/100][14/391] Loss_D: 3.1982 Loss_G: 2.6557 D(x): 0.6247 D(G(z)): 0.4801 / 0.4304\n",
"[93/100][15/391] Loss_D: 3.0722 Loss_G: 2.5818 D(x): 0.6396 D(G(z)): 0.4894 / 0.4315\n",
"[93/100][16/391] Loss_D: 3.1691 Loss_G: 2.3455 D(x): 0.6519 D(G(z)): 0.4509 / 0.4805\n",
"[93/100][17/391] Loss_D: 2.8502 Loss_G: 3.1753 D(x): 0.6202 D(G(z)): 0.3911 / 0.3726\n",
"[93/100][18/391] Loss_D: 2.6647 Loss_G: 3.0188 D(x): 0.6496 D(G(z)): 0.3726 / 0.3928\n",
"[93/100][19/391] Loss_D: 2.4057 Loss_G: 2.1631 D(x): 0.7413 D(G(z)): 0.4035 / 0.5050\n",
"[93/100][20/391] Loss_D: 3.3207 Loss_G: 2.9132 D(x): 0.5935 D(G(z)): 0.4880 / 0.4333\n",
"[93/100][21/391] Loss_D: 2.9180 Loss_G: 3.4704 D(x): 0.6862 D(G(z)): 0.5038 / 0.3411\n",
"[93/100][22/391] Loss_D: 2.7087 Loss_G: 2.4067 D(x): 0.6961 D(G(z)): 0.4359 / 0.4799\n",
"[93/100][23/391] Loss_D: 2.7022 Loss_G: 2.6830 D(x): 0.6138 D(G(z)): 0.3233 / 0.4469\n",
"[93/100][24/391] Loss_D: 3.4556 Loss_G: 2.4188 D(x): 0.6302 D(G(z)): 0.5043 / 0.4728\n",
"[93/100][25/391] Loss_D: 3.0072 Loss_G: 3.0001 D(x): 0.6113 D(G(z)): 0.4127 / 0.3957\n",
"[93/100][26/391] Loss_D: 3.2850 Loss_G: 3.1776 D(x): 0.6229 D(G(z)): 0.4825 / 0.3779\n",
"[93/100][27/391] Loss_D: 2.9329 Loss_G: 3.4040 D(x): 0.6605 D(G(z)): 0.3404 / 0.3482\n",
"[93/100][28/391] Loss_D: 3.7929 Loss_G: 2.5419 D(x): 0.4598 D(G(z)): 0.4065 / 0.4492\n",
"[93/100][29/391] Loss_D: 2.6562 Loss_G: 2.2221 D(x): 0.7579 D(G(z)): 0.4835 / 0.5073\n",
"[93/100][30/391] Loss_D: 2.8121 Loss_G: 2.1797 D(x): 0.6293 D(G(z)): 0.4330 / 0.5350\n",
"[93/100][31/391] Loss_D: 3.7812 Loss_G: 2.7655 D(x): 0.7056 D(G(z)): 0.5699 / 0.4288\n",
"[93/100][32/391] Loss_D: 3.3693 Loss_G: 3.1395 D(x): 0.6028 D(G(z)): 0.4991 / 0.3759\n",
"[93/100][33/391] Loss_D: 2.3711 Loss_G: 2.3248 D(x): 0.7739 D(G(z)): 0.3582 / 0.4831\n",
"[93/100][34/391] Loss_D: 2.6581 Loss_G: 3.3532 D(x): 0.7208 D(G(z)): 0.4583 / 0.3490\n",
"[93/100][35/391] Loss_D: 3.0465 Loss_G: 2.9101 D(x): 0.6388 D(G(z)): 0.4629 / 0.4097\n",
"[93/100][36/391] Loss_D: 3.1469 Loss_G: 3.3298 D(x): 0.5861 D(G(z)): 0.4061 / 0.3664\n",
"[93/100][37/391] Loss_D: 3.1397 Loss_G: 2.5477 D(x): 0.5467 D(G(z)): 0.4054 / 0.4564\n",
"[93/100][38/391] Loss_D: 2.6056 Loss_G: 2.2028 D(x): 0.6894 D(G(z)): 0.3805 / 0.5075\n",
"[93/100][39/391] Loss_D: 2.5920 Loss_G: 2.2930 D(x): 0.7180 D(G(z)): 0.3982 / 0.4971\n",
"[93/100][40/391] Loss_D: 3.1149 Loss_G: 2.5823 D(x): 0.6868 D(G(z)): 0.4807 / 0.4527\n",
"[93/100][41/391] Loss_D: 3.4151 Loss_G: 2.2746 D(x): 0.7040 D(G(z)): 0.5494 / 0.4960\n",
"[93/100][42/391] Loss_D: 2.7713 Loss_G: 2.4443 D(x): 0.7006 D(G(z)): 0.4353 / 0.4766\n",
"[93/100][43/391] Loss_D: 2.7467 Loss_G: 2.6619 D(x): 0.7209 D(G(z)): 0.4043 / 0.4298\n",
"[93/100][44/391] Loss_D: 2.9984 Loss_G: 2.9091 D(x): 0.6238 D(G(z)): 0.4530 / 0.4148\n",
"[93/100][45/391] Loss_D: 3.3096 Loss_G: 2.9020 D(x): 0.5314 D(G(z)): 0.4228 / 0.4155\n",
"[93/100][46/391] Loss_D: 2.8522 Loss_G: 2.3946 D(x): 0.6833 D(G(z)): 0.4232 / 0.4772\n",
"[93/100][47/391] Loss_D: 2.8365 Loss_G: 2.6085 D(x): 0.6748 D(G(z)): 0.4244 / 0.4427\n",
"[93/100][48/391] Loss_D: 2.4973 Loss_G: 2.6501 D(x): 0.7365 D(G(z)): 0.4491 / 0.4472\n",
"[93/100][49/391] Loss_D: 3.0788 Loss_G: 3.2448 D(x): 0.6272 D(G(z)): 0.4679 / 0.3491\n",
"[93/100][50/391] Loss_D: 2.4118 Loss_G: 3.5591 D(x): 0.6888 D(G(z)): 0.3409 / 0.3348\n",
"[93/100][51/391] Loss_D: 2.6556 Loss_G: 3.4754 D(x): 0.6294 D(G(z)): 0.3196 / 0.3450\n",
"[93/100][52/391] Loss_D: 3.1131 Loss_G: 3.5449 D(x): 0.6250 D(G(z)): 0.4507 / 0.3366\n",
"[93/100][53/391] Loss_D: 2.4942 Loss_G: 2.3828 D(x): 0.7890 D(G(z)): 0.4599 / 0.4740\n",
"[93/100][54/391] Loss_D: 2.6449 Loss_G: 3.2934 D(x): 0.7314 D(G(z)): 0.4399 / 0.3582\n",
"[93/100][55/391] Loss_D: 2.6877 Loss_G: 3.4101 D(x): 0.6643 D(G(z)): 0.3735 / 0.3483\n",
"[93/100][56/391] Loss_D: 3.0009 Loss_G: 3.0071 D(x): 0.6489 D(G(z)): 0.4072 / 0.4014\n",
"[93/100][57/391] Loss_D: 2.9369 Loss_G: 3.6142 D(x): 0.6145 D(G(z)): 0.3595 / 0.3296\n",
"[93/100][58/391] Loss_D: 2.7526 Loss_G: 2.5330 D(x): 0.6282 D(G(z)): 0.3630 / 0.4537\n",
"[93/100][59/391] Loss_D: 2.8121 Loss_G: 2.9171 D(x): 0.7370 D(G(z)): 0.4925 / 0.4119\n",
"[93/100][60/391] Loss_D: 3.0618 Loss_G: 2.8412 D(x): 0.7485 D(G(z)): 0.5238 / 0.4227\n",
"[93/100][61/391] Loss_D: 3.5918 Loss_G: 2.8083 D(x): 0.6281 D(G(z)): 0.3478 / 0.4261\n",
"[93/100][62/391] Loss_D: 3.4324 Loss_G: 2.9695 D(x): 0.6467 D(G(z)): 0.5646 / 0.3936\n",
"[93/100][63/391] Loss_D: 3.0359 Loss_G: 4.1886 D(x): 0.6078 D(G(z)): 0.4313 / 0.2807\n",
"[93/100][64/391] Loss_D: 2.4538 Loss_G: 2.4758 D(x): 0.7166 D(G(z)): 0.4163 / 0.4687\n",
"[93/100][65/391] Loss_D: 3.2440 Loss_G: 3.0249 D(x): 0.5040 D(G(z)): 0.2961 / 0.4014\n",
"[93/100][66/391] Loss_D: 2.8286 Loss_G: 3.3818 D(x): 0.6109 D(G(z)): 0.3746 / 0.3518\n",
"[93/100][67/391] Loss_D: 2.8852 Loss_G: 3.4327 D(x): 0.7793 D(G(z)): 0.4403 / 0.3579\n",
"[93/100][68/391] Loss_D: 2.7540 Loss_G: 2.2292 D(x): 0.6675 D(G(z)): 0.4480 / 0.4963\n",
"[93/100][69/391] Loss_D: 3.6547 Loss_G: 1.5934 D(x): 0.4985 D(G(z)): 0.3824 / 0.6242\n",
"[93/100][70/391] Loss_D: 2.8210 Loss_G: 2.3182 D(x): 0.7085 D(G(z)): 0.4540 / 0.5039\n",
"[93/100][71/391] Loss_D: 2.5643 Loss_G: 2.7485 D(x): 0.7752 D(G(z)): 0.4294 / 0.4274\n",
"[93/100][72/391] Loss_D: 3.0013 Loss_G: 2.5826 D(x): 0.6540 D(G(z)): 0.4948 / 0.4531\n",
"[93/100][73/391] Loss_D: 2.5595 Loss_G: 2.9377 D(x): 0.7064 D(G(z)): 0.4271 / 0.4042\n",
"[93/100][74/391] Loss_D: 2.7470 Loss_G: 2.6216 D(x): 0.7193 D(G(z)): 0.4790 / 0.4567\n",
"[93/100][75/391] Loss_D: 2.1731 Loss_G: 3.1321 D(x): 0.7720 D(G(z)): 0.3095 / 0.3891\n",
"[93/100][76/391] Loss_D: 2.7929 Loss_G: 3.0820 D(x): 0.6419 D(G(z)): 0.3881 / 0.3903\n",
"[93/100][77/391] Loss_D: 2.6509 Loss_G: 2.3179 D(x): 0.7387 D(G(z)): 0.3791 / 0.4783\n",
"[93/100][78/391] Loss_D: 3.2641 Loss_G: 3.4744 D(x): 0.5209 D(G(z)): 0.3471 / 0.3327\n",
"[93/100][79/391] Loss_D: 3.0995 Loss_G: 2.2940 D(x): 0.6492 D(G(z)): 0.5132 / 0.4868\n",
"[93/100][80/391] Loss_D: 3.2816 Loss_G: 2.3338 D(x): 0.6261 D(G(z)): 0.4563 / 0.5035\n",
"[93/100][81/391] Loss_D: 2.7827 Loss_G: 2.7375 D(x): 0.7069 D(G(z)): 0.4947 / 0.4430\n",
"[93/100][82/391] Loss_D: 3.4743 Loss_G: 2.2260 D(x): 0.6840 D(G(z)): 0.5459 / 0.4843\n",
"[93/100][83/391] Loss_D: 2.8850 Loss_G: 2.4978 D(x): 0.6564 D(G(z)): 0.4054 / 0.4626\n",
"[93/100][84/391] Loss_D: 2.0653 Loss_G: 2.5977 D(x): 0.7188 D(G(z)): 0.3060 / 0.4633\n",
"[93/100][85/391] Loss_D: 2.8717 Loss_G: 3.0012 D(x): 0.5863 D(G(z)): 0.3492 / 0.3834\n",
"[93/100][86/391] Loss_D: 3.4692 Loss_G: 3.7299 D(x): 0.6600 D(G(z)): 0.5586 / 0.3301\n",
"[93/100][87/391] Loss_D: 3.3569 Loss_G: 2.5627 D(x): 0.6387 D(G(z)): 0.4849 / 0.4534\n",
"[93/100][88/391] Loss_D: 3.1080 Loss_G: 2.2747 D(x): 0.5997 D(G(z)): 0.4757 / 0.4970\n",
"[93/100][89/391] Loss_D: 2.7027 Loss_G: 1.9822 D(x): 0.6699 D(G(z)): 0.3710 / 0.5510\n",
"[93/100][90/391] Loss_D: 2.8569 Loss_G: 2.8129 D(x): 0.7018 D(G(z)): 0.4671 / 0.4254\n",
"[93/100][91/391] Loss_D: 3.5551 Loss_G: 3.1646 D(x): 0.6794 D(G(z)): 0.4901 / 0.3856\n",
"[93/100][92/391] Loss_D: 2.7436 Loss_G: 3.5166 D(x): 0.6966 D(G(z)): 0.4740 / 0.3476\n",
"[93/100][93/391] Loss_D: 2.6256 Loss_G: 1.9616 D(x): 0.6838 D(G(z)): 0.3739 / 0.5526\n",
"[93/100][94/391] Loss_D: 2.9391 Loss_G: 2.9539 D(x): 0.5487 D(G(z)): 0.3538 / 0.4109\n",
"[93/100][95/391] Loss_D: 3.1760 Loss_G: 2.4771 D(x): 0.6014 D(G(z)): 0.4542 / 0.4518\n",
"[93/100][96/391] Loss_D: 3.7496 Loss_G: 2.3344 D(x): 0.5656 D(G(z)): 0.5405 / 0.4750\n",
"[93/100][97/391] Loss_D: 2.8374 Loss_G: 3.1733 D(x): 0.6927 D(G(z)): 0.4114 / 0.3765\n",
"[93/100][98/391] Loss_D: 2.8253 Loss_G: 2.6603 D(x): 0.6721 D(G(z)): 0.4421 / 0.4326\n",
"[93/100][99/391] Loss_D: 3.5318 Loss_G: 3.5005 D(x): 0.6014 D(G(z)): 0.5159 / 0.3692\n",
"[93/100][100/391] Loss_D: 3.3085 Loss_G: 1.6285 D(x): 0.6327 D(G(z)): 0.5090 / 0.6174\n",
"[93/100][101/391] Loss_D: 2.6889 Loss_G: 3.2711 D(x): 0.7368 D(G(z)): 0.4681 / 0.3654\n",
"[93/100][102/391] Loss_D: 2.3405 Loss_G: 2.1962 D(x): 0.7416 D(G(z)): 0.2995 / 0.4992\n",
"[93/100][103/391] Loss_D: 2.7728 Loss_G: 3.2348 D(x): 0.6529 D(G(z)): 0.3868 / 0.3757\n",
"[93/100][104/391] Loss_D: 2.8764 Loss_G: 3.1092 D(x): 0.6689 D(G(z)): 0.4402 / 0.3868\n",
"[93/100][105/391] Loss_D: 2.6150 Loss_G: 3.4039 D(x): 0.7334 D(G(z)): 0.3840 / 0.3415\n",
"[93/100][106/391] Loss_D: 2.4786 Loss_G: 3.1213 D(x): 0.6708 D(G(z)): 0.3545 / 0.3915\n",
"[93/100][107/391] Loss_D: 2.9587 Loss_G: 2.8986 D(x): 0.6437 D(G(z)): 0.4060 / 0.4118\n",
"[93/100][108/391] Loss_D: 2.7323 Loss_G: 3.6217 D(x): 0.7344 D(G(z)): 0.4797 / 0.3288\n",
"[93/100][109/391] Loss_D: 2.3820 Loss_G: 3.2792 D(x): 0.7094 D(G(z)): 0.3873 / 0.3594\n",
"[93/100][110/391] Loss_D: 2.8557 Loss_G: 2.8817 D(x): 0.6835 D(G(z)): 0.4143 / 0.4209\n",
"[93/100][111/391] Loss_D: 3.0375 Loss_G: 4.4939 D(x): 0.6347 D(G(z)): 0.3881 / 0.2549\n",
"[93/100][112/391] Loss_D: 3.5099 Loss_G: 3.4160 D(x): 0.6435 D(G(z)): 0.5544 / 0.3438\n",
"[93/100][113/391] Loss_D: 2.9756 Loss_G: 2.4168 D(x): 0.6767 D(G(z)): 0.4550 / 0.4723\n",
"[93/100][114/391] Loss_D: 2.7467 Loss_G: 2.3511 D(x): 0.6480 D(G(z)): 0.4004 / 0.4856\n",
"[93/100][115/391] Loss_D: 3.3914 Loss_G: 3.3791 D(x): 0.6325 D(G(z)): 0.4910 / 0.3547\n",
"[93/100][116/391] Loss_D: 2.9220 Loss_G: 2.8378 D(x): 0.6561 D(G(z)): 0.4353 / 0.4045\n",
"[93/100][117/391] Loss_D: 2.8169 Loss_G: 2.5004 D(x): 0.6290 D(G(z)): 0.3333 / 0.4556\n",
"[93/100][118/391] Loss_D: 2.3896 Loss_G: 2.2069 D(x): 0.7782 D(G(z)): 0.4490 / 0.5082\n",
"[93/100][119/391] Loss_D: 2.6298 Loss_G: 3.0784 D(x): 0.7739 D(G(z)): 0.4568 / 0.4005\n",
"[93/100][120/391] Loss_D: 3.1280 Loss_G: 3.2866 D(x): 0.6285 D(G(z)): 0.4853 / 0.3611\n",
"[93/100][121/391] Loss_D: 3.7041 Loss_G: 3.5126 D(x): 0.6660 D(G(z)): 0.5377 / 0.3556\n",
"[93/100][122/391] Loss_D: 2.7303 Loss_G: 2.6296 D(x): 0.6910 D(G(z)): 0.4096 / 0.4344\n",
"[93/100][123/391] Loss_D: 3.1073 Loss_G: 3.5441 D(x): 0.5822 D(G(z)): 0.3904 / 0.3459\n",
"[93/100][124/391] Loss_D: 2.7548 Loss_G: 2.7573 D(x): 0.6258 D(G(z)): 0.3906 / 0.4416\n",
"[93/100][125/391] Loss_D: 2.6969 Loss_G: 4.4649 D(x): 0.6430 D(G(z)): 0.3172 / 0.2470\n",
"[93/100][126/391] Loss_D: 2.7482 Loss_G: 3.2355 D(x): 0.6710 D(G(z)): 0.4225 / 0.3592\n",
"[93/100][127/391] Loss_D: 2.9009 Loss_G: 2.7654 D(x): 0.6196 D(G(z)): 0.3833 / 0.4119\n",
"[93/100][128/391] Loss_D: 2.4717 Loss_G: 2.6566 D(x): 0.7397 D(G(z)): 0.4445 / 0.4297\n",
"[93/100][129/391] Loss_D: 3.4954 Loss_G: 4.0181 D(x): 0.6122 D(G(z)): 0.5441 / 0.2947\n",
"[93/100][130/391] Loss_D: 2.7636 Loss_G: 2.5914 D(x): 0.6448 D(G(z)): 0.3382 / 0.4503\n",
"[93/100][131/391] Loss_D: 2.7041 Loss_G: 3.2878 D(x): 0.7858 D(G(z)): 0.4641 / 0.3608\n",
"[93/100][132/391] Loss_D: 3.3022 Loss_G: 3.7201 D(x): 0.6707 D(G(z)): 0.5076 / 0.3123\n",
"[93/100][133/391] Loss_D: 2.7407 Loss_G: 3.0842 D(x): 0.6699 D(G(z)): 0.4404 / 0.3971\n",
"[93/100][134/391] Loss_D: 2.6558 Loss_G: 3.5967 D(x): 0.6471 D(G(z)): 0.3938 / 0.3289\n",
"[93/100][135/391] Loss_D: 3.3336 Loss_G: 2.3523 D(x): 0.6004 D(G(z)): 0.4972 / 0.4853\n",
"[93/100][136/391] Loss_D: 3.0224 Loss_G: 3.8389 D(x): 0.5901 D(G(z)): 0.3961 / 0.3040\n",
"[93/100][137/391] Loss_D: 3.3465 Loss_G: 3.4803 D(x): 0.5823 D(G(z)): 0.4730 / 0.3388\n",
"[93/100][138/391] Loss_D: 2.7099 Loss_G: 2.3966 D(x): 0.6460 D(G(z)): 0.4381 / 0.4854\n",
"[93/100][139/391] Loss_D: 3.1238 Loss_G: 2.8926 D(x): 0.6007 D(G(z)): 0.4024 / 0.4136\n",
"[93/100][140/391] Loss_D: 2.4156 Loss_G: 3.2271 D(x): 0.7660 D(G(z)): 0.4053 / 0.3645\n",
"[93/100][141/391] Loss_D: 2.8819 Loss_G: 2.5840 D(x): 0.6238 D(G(z)): 0.3690 / 0.4562\n",
"[93/100][142/391] Loss_D: 3.1504 Loss_G: 3.4068 D(x): 0.7140 D(G(z)): 0.5609 / 0.3594\n",
"[93/100][143/391] Loss_D: 2.9992 Loss_G: 2.9532 D(x): 0.6918 D(G(z)): 0.4764 / 0.4109\n",
"[93/100][144/391] Loss_D: 2.6324 Loss_G: 2.7179 D(x): 0.6425 D(G(z)): 0.3851 / 0.4419\n",
"[93/100][145/391] Loss_D: 2.8199 Loss_G: 2.9244 D(x): 0.6532 D(G(z)): 0.3794 / 0.3985\n",
"[93/100][146/391] Loss_D: 2.6473 Loss_G: 3.1085 D(x): 0.7346 D(G(z)): 0.4153 / 0.3939\n",
"[93/100][147/391] Loss_D: 2.7381 Loss_G: 2.1457 D(x): 0.7198 D(G(z)): 0.4102 / 0.5131\n",
"[93/100][148/391] Loss_D: 2.4216 Loss_G: 2.4860 D(x): 0.6766 D(G(z)): 0.3835 / 0.4585\n",
"[93/100][149/391] Loss_D: 2.8487 Loss_G: 2.5988 D(x): 0.6977 D(G(z)): 0.4397 / 0.4466\n",
"[93/100][150/391] Loss_D: 2.8575 Loss_G: 2.4514 D(x): 0.6790 D(G(z)): 0.4538 / 0.4970\n",
"[93/100][151/391] Loss_D: 3.5481 Loss_G: 3.4751 D(x): 0.7105 D(G(z)): 0.4016 / 0.3398\n",
"[93/100][152/391] Loss_D: 2.4424 Loss_G: 3.4484 D(x): 0.6273 D(G(z)): 0.2981 / 0.3581\n",
"[93/100][153/391] Loss_D: 3.0040 Loss_G: 3.8143 D(x): 0.6677 D(G(z)): 0.4532 / 0.3198\n",
"[93/100][154/391] Loss_D: 3.0192 Loss_G: 2.2458 D(x): 0.6845 D(G(z)): 0.5094 / 0.5076\n",
"[93/100][155/391] Loss_D: 3.0435 Loss_G: 3.1489 D(x): 0.6288 D(G(z)): 0.4050 / 0.3835\n",
"[93/100][156/391] Loss_D: 3.1230 Loss_G: 1.6615 D(x): 0.5746 D(G(z)): 0.3954 / 0.6021\n",
"[93/100][157/391] Loss_D: 2.8363 Loss_G: 2.2579 D(x): 0.7221 D(G(z)): 0.4411 / 0.4968\n",
"[93/100][158/391] Loss_D: 2.6757 Loss_G: 3.4467 D(x): 0.7570 D(G(z)): 0.5232 / 0.3580\n",
"[93/100][159/391] Loss_D: 2.6537 Loss_G: 2.5071 D(x): 0.7520 D(G(z)): 0.4187 / 0.4515\n",
"[93/100][160/391] Loss_D: 2.8123 Loss_G: 4.7159 D(x): 0.6954 D(G(z)): 0.4453 / 0.2497\n",
"[93/100][161/391] Loss_D: 2.5887 Loss_G: 2.7896 D(x): 0.7262 D(G(z)): 0.3790 / 0.4352\n",
"[93/100][162/391] Loss_D: 2.4364 Loss_G: 3.3848 D(x): 0.6899 D(G(z)): 0.3542 / 0.3418\n",
"[93/100][163/391] Loss_D: 2.5283 Loss_G: 2.6295 D(x): 0.6725 D(G(z)): 0.3374 / 0.4444\n",
"[93/100][164/391] Loss_D: 2.6063 Loss_G: 2.9442 D(x): 0.6360 D(G(z)): 0.3402 / 0.4045\n",
"[93/100][165/391] Loss_D: 3.1213 Loss_G: 2.8765 D(x): 0.5649 D(G(z)): 0.4122 / 0.4042\n",
"[93/100][166/391] Loss_D: 2.8929 Loss_G: 2.6456 D(x): 0.6402 D(G(z)): 0.4228 / 0.4343\n",
"[93/100][167/391] Loss_D: 3.5107 Loss_G: 2.0603 D(x): 0.5563 D(G(z)): 0.4793 / 0.5236\n",
"[93/100][168/391] Loss_D: 2.6206 Loss_G: 2.1634 D(x): 0.6900 D(G(z)): 0.3860 / 0.5156\n",
"[93/100][169/391] Loss_D: 2.4553 Loss_G: 2.2448 D(x): 0.7304 D(G(z)): 0.4255 / 0.5093\n",
"[93/100][170/391] Loss_D: 3.0265 Loss_G: 1.9773 D(x): 0.6768 D(G(z)): 0.4677 / 0.5480\n",
"[93/100][171/391] Loss_D: 2.8095 Loss_G: 3.0226 D(x): 0.7086 D(G(z)): 0.4511 / 0.4117\n",
"[93/100][172/391] Loss_D: 3.0389 Loss_G: 2.7396 D(x): 0.5847 D(G(z)): 0.3950 / 0.4383\n",
"[93/100][173/391] Loss_D: 2.9207 Loss_G: 3.2562 D(x): 0.7292 D(G(z)): 0.5014 / 0.3554\n",
"[93/100][174/391] Loss_D: 3.1058 Loss_G: 3.4226 D(x): 0.6646 D(G(z)): 0.5082 / 0.3637\n",
"[93/100][175/391] Loss_D: 3.1800 Loss_G: 2.4000 D(x): 0.6159 D(G(z)): 0.4492 / 0.4954\n",
"[93/100][176/391] Loss_D: 2.6286 Loss_G: 3.6171 D(x): 0.7254 D(G(z)): 0.3876 / 0.3201\n",
"[93/100][177/391] Loss_D: 2.8616 Loss_G: 3.3977 D(x): 0.7083 D(G(z)): 0.4405 / 0.3465\n",
"[93/100][178/391] Loss_D: 2.7035 Loss_G: 2.2379 D(x): 0.6241 D(G(z)): 0.3221 / 0.5161\n",
"[93/100][179/391] Loss_D: 2.6416 Loss_G: 3.7217 D(x): 0.6371 D(G(z)): 0.3072 / 0.3235\n",
"[93/100][180/391] Loss_D: 2.6275 Loss_G: 2.9872 D(x): 0.7096 D(G(z)): 0.4224 / 0.4121\n",
"[93/100][181/391] Loss_D: 3.6988 Loss_G: 2.7325 D(x): 0.6426 D(G(z)): 0.5450 / 0.4493\n",
"[93/100][182/391] Loss_D: 2.4484 Loss_G: 2.3959 D(x): 0.7308 D(G(z)): 0.4101 / 0.4942\n",
"[93/100][183/391] Loss_D: 2.8723 Loss_G: 2.8051 D(x): 0.6553 D(G(z)): 0.4722 / 0.4249\n",
"[93/100][184/391] Loss_D: 3.4478 Loss_G: 3.4845 D(x): 0.5235 D(G(z)): 0.3749 / 0.3427\n",
"[93/100][185/391] Loss_D: 2.4746 Loss_G: 2.2937 D(x): 0.7025 D(G(z)): 0.3545 / 0.4780\n",
"[93/100][186/391] Loss_D: 3.0099 Loss_G: 2.4204 D(x): 0.6584 D(G(z)): 0.4645 / 0.4770\n",
"[93/100][187/391] Loss_D: 2.8519 Loss_G: 3.2775 D(x): 0.6715 D(G(z)): 0.4483 / 0.3693\n",
"[93/100][188/391] Loss_D: 2.7961 Loss_G: 1.8546 D(x): 0.6446 D(G(z)): 0.4194 / 0.5564\n",
"[93/100][189/391] Loss_D: 2.5576 Loss_G: 2.5042 D(x): 0.7464 D(G(z)): 0.4353 / 0.4590\n",
"[93/100][190/391] Loss_D: 2.6235 Loss_G: 3.0391 D(x): 0.7187 D(G(z)): 0.4350 / 0.4079\n",
"[93/100][191/391] Loss_D: 3.2699 Loss_G: 2.6663 D(x): 0.5971 D(G(z)): 0.4447 / 0.4550\n",
"[93/100][192/391] Loss_D: 3.4218 Loss_G: 2.4859 D(x): 0.5125 D(G(z)): 0.4124 / 0.4651\n",
"[93/100][193/391] Loss_D: 2.7050 Loss_G: 3.4827 D(x): 0.7816 D(G(z)): 0.5036 / 0.3428\n",
"[93/100][194/391] Loss_D: 2.8552 Loss_G: 2.4943 D(x): 0.6276 D(G(z)): 0.3616 / 0.4525\n",
"[93/100][195/391] Loss_D: 3.0184 Loss_G: 2.1794 D(x): 0.6242 D(G(z)): 0.4211 / 0.4971\n",
"[93/100][196/391] Loss_D: 2.9795 Loss_G: 2.6840 D(x): 0.7047 D(G(z)): 0.5332 / 0.4263\n",
"[93/100][197/391] Loss_D: 2.8388 Loss_G: 3.1477 D(x): 0.6934 D(G(z)): 0.3975 / 0.3876\n",
"[93/100][198/391] Loss_D: 2.9002 Loss_G: 3.8240 D(x): 0.6588 D(G(z)): 0.4163 / 0.3021\n",
"[93/100][199/391] Loss_D: 2.7547 Loss_G: 2.5526 D(x): 0.6635 D(G(z)): 0.3962 / 0.4631\n",
"[93/100][200/391] Loss_D: 2.9049 Loss_G: 2.0837 D(x): 0.6836 D(G(z)): 0.4870 / 0.5181\n",
"[93/100][201/391] Loss_D: 3.1929 Loss_G: 2.5677 D(x): 0.6737 D(G(z)): 0.4976 / 0.4590\n",
"[93/100][202/391] Loss_D: 3.3672 Loss_G: 3.3661 D(x): 0.6443 D(G(z)): 0.5368 / 0.3592\n",
"[93/100][203/391] Loss_D: 3.0358 Loss_G: 2.9802 D(x): 0.6163 D(G(z)): 0.4226 / 0.4153\n",
"[93/100][204/391] Loss_D: 2.7323 Loss_G: 3.2090 D(x): 0.6317 D(G(z)): 0.4175 / 0.3876\n",
"[93/100][205/391] Loss_D: 2.5614 Loss_G: 2.4075 D(x): 0.6597 D(G(z)): 0.3420 / 0.4881\n",
"[93/100][206/391] Loss_D: 3.4675 Loss_G: 2.8298 D(x): 0.5661 D(G(z)): 0.5271 / 0.4153\n",
"[93/100][207/391] Loss_D: 2.5923 Loss_G: 2.4688 D(x): 0.6849 D(G(z)): 0.3468 / 0.4620\n",
"[93/100][208/391] Loss_D: 2.7423 Loss_G: 2.7452 D(x): 0.5963 D(G(z)): 0.3015 / 0.4349\n",
"[93/100][209/391] Loss_D: 3.2079 Loss_G: 2.8136 D(x): 0.5714 D(G(z)): 0.4033 / 0.4200\n",
"[93/100][210/391] Loss_D: 2.6995 Loss_G: 2.8079 D(x): 0.7385 D(G(z)): 0.4016 / 0.4215\n",
"[93/100][211/391] Loss_D: 3.8342 Loss_G: 2.8951 D(x): 0.6880 D(G(z)): 0.5169 / 0.4085\n",
"[93/100][212/391] Loss_D: 2.9505 Loss_G: 2.4481 D(x): 0.6723 D(G(z)): 0.4473 / 0.4634\n",
"[93/100][213/391] Loss_D: 3.0646 Loss_G: 2.5073 D(x): 0.6175 D(G(z)): 0.4350 / 0.4541\n",
"[93/100][214/391] Loss_D: 3.2405 Loss_G: 2.6488 D(x): 0.6365 D(G(z)): 0.5188 / 0.4494\n",
"[93/100][215/391] Loss_D: 2.9002 Loss_G: 3.3630 D(x): 0.6630 D(G(z)): 0.4118 / 0.3534\n",
"[93/100][216/391] Loss_D: 3.5000 Loss_G: 2.5445 D(x): 0.6991 D(G(z)): 0.5800 / 0.4514\n",
"[93/100][217/391] Loss_D: 2.9222 Loss_G: 2.2066 D(x): 0.6192 D(G(z)): 0.4056 / 0.5106\n",
"[93/100][218/391] Loss_D: 3.1693 Loss_G: 2.7601 D(x): 0.6702 D(G(z)): 0.5550 / 0.4340\n",
"[93/100][219/391] Loss_D: 3.0712 Loss_G: 2.0880 D(x): 0.6401 D(G(z)): 0.4770 / 0.5237\n",
"[93/100][220/391] Loss_D: 3.2580 Loss_G: 2.7671 D(x): 0.5683 D(G(z)): 0.4394 / 0.4273\n",
"[93/100][221/391] Loss_D: 2.6827 Loss_G: 3.2662 D(x): 0.6507 D(G(z)): 0.3657 / 0.3767\n",
"[93/100][222/391] Loss_D: 2.5595 Loss_G: 2.5961 D(x): 0.7010 D(G(z)): 0.4195 / 0.4528\n",
"[93/100][223/391] Loss_D: 2.5867 Loss_G: 2.4714 D(x): 0.6855 D(G(z)): 0.3422 / 0.4574\n",
"[93/100][224/391] Loss_D: 3.0792 Loss_G: 2.6696 D(x): 0.6935 D(G(z)): 0.5370 / 0.4452\n",
"[93/100][225/391] Loss_D: 3.5442 Loss_G: 2.4575 D(x): 0.6010 D(G(z)): 0.5341 / 0.4807\n",
"[93/100][226/391] Loss_D: 2.5616 Loss_G: 2.7361 D(x): 0.6277 D(G(z)): 0.3323 / 0.4487\n",
"[93/100][227/391] Loss_D: 3.1250 Loss_G: 2.9539 D(x): 0.6290 D(G(z)): 0.4854 / 0.4054\n",
"[93/100][228/391] Loss_D: 2.8034 Loss_G: 2.3086 D(x): 0.6970 D(G(z)): 0.4752 / 0.4999\n",
"[93/100][229/391] Loss_D: 2.9446 Loss_G: 2.6354 D(x): 0.6418 D(G(z)): 0.4470 / 0.4550\n",
"[93/100][230/391] Loss_D: 2.7633 Loss_G: 2.0264 D(x): 0.6519 D(G(z)): 0.4289 / 0.5421\n",
"[93/100][231/391] Loss_D: 2.8025 Loss_G: 2.8154 D(x): 0.6700 D(G(z)): 0.4144 / 0.4293\n",
"[93/100][232/391] Loss_D: 2.7802 Loss_G: 2.9025 D(x): 0.7154 D(G(z)): 0.4438 / 0.4015\n",
"[93/100][233/391] Loss_D: 2.8682 Loss_G: 2.5492 D(x): 0.6143 D(G(z)): 0.3983 / 0.4587\n",
"[93/100][234/391] Loss_D: 3.0086 Loss_G: 2.7732 D(x): 0.6447 D(G(z)): 0.4665 / 0.4319\n",
"[93/100][235/391] Loss_D: 2.5322 Loss_G: 2.3739 D(x): 0.7100 D(G(z)): 0.4067 / 0.4697\n",
"[93/100][236/391] Loss_D: 3.1703 Loss_G: 2.9662 D(x): 0.6079 D(G(z)): 0.4398 / 0.3933\n",
"[93/100][237/391] Loss_D: 2.8921 Loss_G: 3.1260 D(x): 0.6459 D(G(z)): 0.4360 / 0.3758\n",
"[93/100][238/391] Loss_D: 3.4003 Loss_G: 2.6142 D(x): 0.6475 D(G(z)): 0.5430 / 0.4353\n",
"[93/100][239/391] Loss_D: 2.7595 Loss_G: 2.9333 D(x): 0.6887 D(G(z)): 0.4603 / 0.4081\n",
"[93/100][240/391] Loss_D: 2.6233 Loss_G: 2.4562 D(x): 0.6505 D(G(z)): 0.3718 / 0.4815\n",
"[93/100][241/391] Loss_D: 3.7816 Loss_G: 1.9799 D(x): 0.5838 D(G(z)): 0.4538 / 0.5489\n",
"[93/100][242/391] Loss_D: 3.0656 Loss_G: 2.9520 D(x): 0.6435 D(G(z)): 0.4887 / 0.4157\n",
"[93/100][243/391] Loss_D: 3.0135 Loss_G: 2.9222 D(x): 0.7159 D(G(z)): 0.5166 / 0.3976\n",
"[93/100][244/391] Loss_D: 2.1378 Loss_G: 1.9803 D(x): 0.6883 D(G(z)): 0.2942 / 0.5428\n",
"[93/100][245/391] Loss_D: 2.5213 Loss_G: 2.2923 D(x): 0.7133 D(G(z)): 0.4060 / 0.4982\n",
"[93/100][246/391] Loss_D: 2.9117 Loss_G: 2.8814 D(x): 0.6156 D(G(z)): 0.3617 / 0.3885\n",
"[93/100][247/391] Loss_D: 3.4058 Loss_G: 3.0182 D(x): 0.6015 D(G(z)): 0.5195 / 0.4017\n",
"[93/100][248/391] Loss_D: 3.2253 Loss_G: 3.5776 D(x): 0.5851 D(G(z)): 0.4801 / 0.3325\n",
"[93/100][249/391] Loss_D: 2.7263 Loss_G: 2.3545 D(x): 0.6909 D(G(z)): 0.4469 / 0.5142\n",
"[93/100][250/391] Loss_D: 2.8851 Loss_G: 2.5810 D(x): 0.7031 D(G(z)): 0.4297 / 0.4600\n",
"[93/100][251/391] Loss_D: 3.3003 Loss_G: 2.8733 D(x): 0.5980 D(G(z)): 0.4540 / 0.4178\n",
"[93/100][252/391] Loss_D: 2.4954 Loss_G: 3.1962 D(x): 0.7201 D(G(z)): 0.3590 / 0.3792\n",
"[93/100][253/391] Loss_D: 2.6784 Loss_G: 2.8538 D(x): 0.6641 D(G(z)): 0.3472 / 0.4059\n",
"[93/100][254/391] Loss_D: 2.7638 Loss_G: 2.4873 D(x): 0.7416 D(G(z)): 0.5200 / 0.4697\n",
"[93/100][255/391] Loss_D: 2.7289 Loss_G: 3.2632 D(x): 0.6582 D(G(z)): 0.3717 / 0.3589\n",
"[93/100][256/391] Loss_D: 3.4367 Loss_G: 2.7540 D(x): 0.4963 D(G(z)): 0.3898 / 0.4225\n",
"[93/100][257/391] Loss_D: 3.3426 Loss_G: 3.1825 D(x): 0.6340 D(G(z)): 0.4853 / 0.3785\n",
"[93/100][258/391] Loss_D: 2.6585 Loss_G: 2.4915 D(x): 0.6778 D(G(z)): 0.3943 / 0.4857\n",
"[93/100][259/391] Loss_D: 2.7683 Loss_G: 2.6363 D(x): 0.6673 D(G(z)): 0.4227 / 0.4450\n",
"[93/100][260/391] Loss_D: 2.8259 Loss_G: 2.4414 D(x): 0.6713 D(G(z)): 0.4308 / 0.4723\n",
"[93/100][261/391] Loss_D: 3.1714 Loss_G: 2.6200 D(x): 0.6559 D(G(z)): 0.4779 / 0.4714\n",
"[93/100][262/391] Loss_D: 2.6506 Loss_G: 2.5277 D(x): 0.7335 D(G(z)): 0.4197 / 0.4565\n",
"[93/100][263/391] Loss_D: 2.8223 Loss_G: 3.4303 D(x): 0.7622 D(G(z)): 0.4567 / 0.3432\n",
"[93/100][264/391] Loss_D: 2.7436 Loss_G: 2.4278 D(x): 0.6726 D(G(z)): 0.4538 / 0.4869\n",
"[93/100][265/391] Loss_D: 2.8485 Loss_G: 2.9350 D(x): 0.6965 D(G(z)): 0.4625 / 0.3987\n",
"[93/100][266/391] Loss_D: 3.0210 Loss_G: 3.4776 D(x): 0.6129 D(G(z)): 0.3640 / 0.3434\n",
"[93/100][267/391] Loss_D: 2.7700 Loss_G: 2.1776 D(x): 0.5854 D(G(z)): 0.3371 / 0.5154\n",
"[93/100][268/391] Loss_D: 2.3748 Loss_G: 2.8222 D(x): 0.6885 D(G(z)): 0.3518 / 0.4251\n",
"[93/100][269/391] Loss_D: 3.2394 Loss_G: 2.5448 D(x): 0.6868 D(G(z)): 0.5408 / 0.4563\n",
"[93/100][270/391] Loss_D: 2.8998 Loss_G: 3.4961 D(x): 0.6171 D(G(z)): 0.3949 / 0.3454\n",
"[93/100][271/391] Loss_D: 3.5560 Loss_G: 2.3099 D(x): 0.6785 D(G(z)): 0.4110 / 0.4926\n",
"[93/100][272/391] Loss_D: 2.4713 Loss_G: 2.5806 D(x): 0.6959 D(G(z)): 0.3332 / 0.4569\n",
"[93/100][273/391] Loss_D: 2.8939 Loss_G: 2.6409 D(x): 0.7155 D(G(z)): 0.4930 / 0.4448\n",
"[93/100][274/391] Loss_D: 2.7785 Loss_G: 2.8625 D(x): 0.6015 D(G(z)): 0.3625 / 0.4232\n",
"[93/100][275/391] Loss_D: 2.7799 Loss_G: 1.8082 D(x): 0.6153 D(G(z)): 0.3468 / 0.5663\n",
"[93/100][276/391] Loss_D: 2.8346 Loss_G: 3.4921 D(x): 0.7495 D(G(z)): 0.4647 / 0.3485\n",
"[93/100][277/391] Loss_D: 2.9556 Loss_G: 2.9411 D(x): 0.6506 D(G(z)): 0.4590 / 0.4122\n",
"[93/100][278/391] Loss_D: 2.3050 Loss_G: 1.9580 D(x): 0.7372 D(G(z)): 0.3994 / 0.5430\n",
"[93/100][279/391] Loss_D: 2.5963 Loss_G: 2.7760 D(x): 0.6910 D(G(z)): 0.3592 / 0.4261\n",
"[93/100][280/391] Loss_D: 2.8753 Loss_G: 2.1736 D(x): 0.6335 D(G(z)): 0.3950 / 0.5114\n",
"[93/100][281/391] Loss_D: 3.3381 Loss_G: 3.2619 D(x): 0.7688 D(G(z)): 0.5850 / 0.3685\n",
"[93/100][282/391] Loss_D: 2.3081 Loss_G: 2.6966 D(x): 0.7624 D(G(z)): 0.3546 / 0.4511\n",
"[93/100][283/391] Loss_D: 3.7985 Loss_G: 2.4737 D(x): 0.5963 D(G(z)): 0.5649 / 0.4634\n",
"[93/100][284/391] Loss_D: 2.3437 Loss_G: 2.6815 D(x): 0.6995 D(G(z)): 0.3626 / 0.4404\n",
"[93/100][285/391] Loss_D: 3.2749 Loss_G: 2.9277 D(x): 0.5769 D(G(z)): 0.4334 / 0.4008\n",
"[93/100][286/391] Loss_D: 2.6649 Loss_G: 3.1338 D(x): 0.6177 D(G(z)): 0.2761 / 0.3859\n",
"[93/100][287/391] Loss_D: 3.1910 Loss_G: 2.3514 D(x): 0.6302 D(G(z)): 0.4643 / 0.4875\n",
"[93/100][288/391] Loss_D: 2.6664 Loss_G: 3.7565 D(x): 0.6735 D(G(z)): 0.4335 / 0.3330\n",
"[93/100][289/391] Loss_D: 2.8457 Loss_G: 3.0280 D(x): 0.6409 D(G(z)): 0.3886 / 0.3951\n",
"[93/100][290/391] Loss_D: 2.5769 Loss_G: 2.5079 D(x): 0.7694 D(G(z)): 0.4439 / 0.4718\n",
"[93/100][291/391] Loss_D: 2.5825 Loss_G: 3.0809 D(x): 0.7099 D(G(z)): 0.3878 / 0.3959\n",
"[93/100][292/391] Loss_D: 2.6412 Loss_G: 2.8644 D(x): 0.7031 D(G(z)): 0.4053 / 0.4136\n",
"[93/100][293/391] Loss_D: 3.4720 Loss_G: 2.8250 D(x): 0.6302 D(G(z)): 0.5507 / 0.4308\n",
"[93/100][294/391] Loss_D: 2.2855 Loss_G: 2.7240 D(x): 0.7373 D(G(z)): 0.3862 / 0.4218\n",
"[93/100][295/391] Loss_D: 3.1576 Loss_G: 2.4880 D(x): 0.6904 D(G(z)): 0.5177 / 0.4558\n",
"[93/100][296/391] Loss_D: 3.3881 Loss_G: 3.6427 D(x): 0.5140 D(G(z)): 0.3654 / 0.3215\n",
"[93/100][297/391] Loss_D: 2.6195 Loss_G: 3.5940 D(x): 0.7064 D(G(z)): 0.3159 / 0.3364\n",
"[93/100][298/391] Loss_D: 2.1414 Loss_G: 2.2061 D(x): 0.8004 D(G(z)): 0.3786 / 0.4946\n",
"[93/100][299/391] Loss_D: 2.9239 Loss_G: 2.8578 D(x): 0.6928 D(G(z)): 0.4820 / 0.4115\n",
"[93/100][300/391] Loss_D: 2.7355 Loss_G: 2.4658 D(x): 0.6792 D(G(z)): 0.4095 / 0.4645\n",
"[93/100][301/391] Loss_D: 3.5592 Loss_G: 3.0185 D(x): 0.6733 D(G(z)): 0.4432 / 0.4000\n",
"[93/100][302/391] Loss_D: 2.5576 Loss_G: 3.0212 D(x): 0.7185 D(G(z)): 0.4216 / 0.3937\n",
"[93/100][303/391] Loss_D: 2.5198 Loss_G: 2.9257 D(x): 0.6780 D(G(z)): 0.3382 / 0.3999\n",
"[93/100][304/391] Loss_D: 2.3210 Loss_G: 2.6359 D(x): 0.6713 D(G(z)): 0.3328 / 0.4521\n",
"[93/100][305/391] Loss_D: 3.2159 Loss_G: 2.7686 D(x): 0.5145 D(G(z)): 0.2960 / 0.4339\n",
"[93/100][306/391] Loss_D: 3.7354 Loss_G: 2.3846 D(x): 0.5503 D(G(z)): 0.4976 / 0.4866\n",
"[93/100][307/391] Loss_D: 2.9701 Loss_G: 2.2758 D(x): 0.6356 D(G(z)): 0.4327 / 0.4901\n",
"[93/100][308/391] Loss_D: 2.4349 Loss_G: 2.2263 D(x): 0.7591 D(G(z)): 0.4447 / 0.5256\n",
"[93/100][309/391] Loss_D: 2.9974 Loss_G: 3.4878 D(x): 0.6939 D(G(z)): 0.5314 / 0.3493\n",
"[93/100][310/391] Loss_D: 3.1760 Loss_G: 2.0174 D(x): 0.7198 D(G(z)): 0.5224 / 0.5483\n",
"[93/100][311/391] Loss_D: 3.2081 Loss_G: 3.1423 D(x): 0.6519 D(G(z)): 0.4669 / 0.3872\n",
"[93/100][312/391] Loss_D: 3.0244 Loss_G: 2.8298 D(x): 0.6226 D(G(z)): 0.4223 / 0.4094\n",
"[93/100][313/391] Loss_D: 2.5157 Loss_G: 2.3326 D(x): 0.6409 D(G(z)): 0.3067 / 0.4915\n",
"[93/100][314/391] Loss_D: 2.7009 Loss_G: 3.1496 D(x): 0.6688 D(G(z)): 0.4758 / 0.3906\n",
"[93/100][315/391] Loss_D: 3.1559 Loss_G: 3.1032 D(x): 0.6286 D(G(z)): 0.4736 / 0.3912\n",
"[93/100][316/391] Loss_D: 2.6462 Loss_G: 3.2237 D(x): 0.7003 D(G(z)): 0.3786 / 0.3787\n",
"[93/100][317/391] Loss_D: 2.5025 Loss_G: 2.9161 D(x): 0.7064 D(G(z)): 0.3368 / 0.4111\n",
"[93/100][318/391] Loss_D: 3.1022 Loss_G: 3.4400 D(x): 0.5863 D(G(z)): 0.4602 / 0.3544\n",
"[93/100][319/391] Loss_D: 2.9253 Loss_G: 2.6745 D(x): 0.6158 D(G(z)): 0.4065 / 0.4416\n",
"[93/100][320/391] Loss_D: 2.6066 Loss_G: 2.5985 D(x): 0.7352 D(G(z)): 0.3986 / 0.4650\n",
"[93/100][321/391] Loss_D: 2.7841 Loss_G: 3.2340 D(x): 0.7631 D(G(z)): 0.4630 / 0.3687\n",
"[93/100][322/391] Loss_D: 2.6519 Loss_G: 2.9651 D(x): 0.6905 D(G(z)): 0.4225 / 0.4042\n",
"[93/100][323/391] Loss_D: 3.0037 Loss_G: 2.0433 D(x): 0.6644 D(G(z)): 0.4559 / 0.5293\n",
"[93/100][324/391] Loss_D: 2.9088 Loss_G: 2.5079 D(x): 0.6648 D(G(z)): 0.4752 / 0.4653\n",
"[93/100][325/391] Loss_D: 2.7091 Loss_G: 3.2577 D(x): 0.7117 D(G(z)): 0.3769 / 0.3713\n",
"[93/100][326/391] Loss_D: 2.5755 Loss_G: 2.9046 D(x): 0.7127 D(G(z)): 0.4008 / 0.4075\n",
"[93/100][327/391] Loss_D: 2.6619 Loss_G: 2.3182 D(x): 0.6542 D(G(z)): 0.3526 / 0.4885\n",
"[93/100][328/391] Loss_D: 2.4861 Loss_G: 3.8629 D(x): 0.7043 D(G(z)): 0.4133 / 0.3110\n",
"[93/100][329/391] Loss_D: 2.7033 Loss_G: 2.1559 D(x): 0.7045 D(G(z)): 0.4048 / 0.5131\n",
"[93/100][330/391] Loss_D: 2.8586 Loss_G: 2.0252 D(x): 0.5745 D(G(z)): 0.2706 / 0.5462\n",
"[93/100][331/391] Loss_D: 3.6837 Loss_G: 2.7682 D(x): 0.6710 D(G(z)): 0.5398 / 0.4285\n",
"[93/100][332/391] Loss_D: 2.8907 Loss_G: 2.6981 D(x): 0.5969 D(G(z)): 0.3796 / 0.4337\n",
"[93/100][333/391] Loss_D: 3.4994 Loss_G: 2.2124 D(x): 0.6916 D(G(z)): 0.5565 / 0.5039\n",
"[93/100][334/391] Loss_D: 3.1237 Loss_G: 3.8157 D(x): 0.6948 D(G(z)): 0.5386 / 0.3079\n",
"[93/100][335/391] Loss_D: 3.3761 Loss_G: 3.7146 D(x): 0.5877 D(G(z)): 0.4850 / 0.3145\n",
"[93/100][336/391] Loss_D: 2.7302 Loss_G: 3.0862 D(x): 0.6937 D(G(z)): 0.3845 / 0.3944\n",
"[93/100][337/391] Loss_D: 3.3588 Loss_G: 3.2016 D(x): 0.5975 D(G(z)): 0.4836 / 0.3680\n",
"[93/100][338/391] Loss_D: 2.9850 Loss_G: 2.9314 D(x): 0.6446 D(G(z)): 0.4729 / 0.4270\n",
"[93/100][339/391] Loss_D: 2.4952 Loss_G: 2.8824 D(x): 0.6981 D(G(z)): 0.3739 / 0.4025\n",
"[93/100][340/391] Loss_D: 3.6387 Loss_G: 3.2719 D(x): 0.5814 D(G(z)): 0.4976 / 0.3673\n",
"[93/100][341/391] Loss_D: 3.2323 Loss_G: 1.9605 D(x): 0.6569 D(G(z)): 0.4999 / 0.5360\n",
"[93/100][342/391] Loss_D: 2.4346 Loss_G: 2.4858 D(x): 0.6693 D(G(z)): 0.3117 / 0.4712\n",
"[93/100][343/391] Loss_D: 3.0511 Loss_G: 2.1750 D(x): 0.6001 D(G(z)): 0.3513 / 0.5209\n",
"[93/100][344/391] Loss_D: 2.5317 Loss_G: 2.3023 D(x): 0.7602 D(G(z)): 0.4726 / 0.4978\n",
"[93/100][345/391] Loss_D: 3.3968 Loss_G: 2.1002 D(x): 0.6623 D(G(z)): 0.5419 / 0.5199\n",
"[93/100][346/391] Loss_D: 2.8657 Loss_G: 2.3461 D(x): 0.6490 D(G(z)): 0.3998 / 0.4881\n",
"[93/100][347/391] Loss_D: 3.3058 Loss_G: 2.7769 D(x): 0.5513 D(G(z)): 0.3941 / 0.4158\n",
"[93/100][348/391] Loss_D: 2.5687 Loss_G: 2.9528 D(x): 0.7942 D(G(z)): 0.5319 / 0.4028\n",
"[93/100][349/391] Loss_D: 2.7277 Loss_G: 2.7137 D(x): 0.6693 D(G(z)): 0.4013 / 0.4300\n",
"[93/100][350/391] Loss_D: 3.5135 Loss_G: 3.5403 D(x): 0.6799 D(G(z)): 0.5957 / 0.3467\n",
"[93/100][351/391] Loss_D: 2.8857 Loss_G: 3.7616 D(x): 0.5991 D(G(z)): 0.3103 / 0.3300\n",
"[93/100][352/391] Loss_D: 2.1119 Loss_G: 3.3352 D(x): 0.7784 D(G(z)): 0.3562 / 0.3669\n",
"[93/100][353/391] Loss_D: 3.0594 Loss_G: 2.9782 D(x): 0.6479 D(G(z)): 0.4798 / 0.4033\n",
"[93/100][354/391] Loss_D: 2.7799 Loss_G: 3.2475 D(x): 0.6656 D(G(z)): 0.4576 / 0.3852\n",
"[93/100][355/391] Loss_D: 3.0629 Loss_G: 2.5260 D(x): 0.6732 D(G(z)): 0.5076 / 0.4825\n",
"[93/100][356/391] Loss_D: 2.9085 Loss_G: 2.8086 D(x): 0.6205 D(G(z)): 0.4125 / 0.4302\n",
"[93/100][357/391] Loss_D: 2.6992 Loss_G: 2.5714 D(x): 0.6698 D(G(z)): 0.3704 / 0.4543\n",
"[93/100][358/391] Loss_D: 2.7047 Loss_G: 3.3597 D(x): 0.6268 D(G(z)): 0.3634 / 0.3514\n",
"[93/100][359/391] Loss_D: 2.7578 Loss_G: 3.3063 D(x): 0.6444 D(G(z)): 0.3374 / 0.3684\n",
"[93/100][360/391] Loss_D: 2.8920 Loss_G: 3.1905 D(x): 0.6847 D(G(z)): 0.4643 / 0.3923\n",
"[93/100][361/391] Loss_D: 3.5719 Loss_G: 2.9036 D(x): 0.6704 D(G(z)): 0.5364 / 0.4049\n",
"[93/100][362/391] Loss_D: 2.7593 Loss_G: 2.5438 D(x): 0.6412 D(G(z)): 0.3612 / 0.4642\n",
"[93/100][363/391] Loss_D: 2.6336 Loss_G: 3.3943 D(x): 0.7063 D(G(z)): 0.4316 / 0.3541\n",
"[93/100][364/391] Loss_D: 2.7266 Loss_G: 3.4423 D(x): 0.6367 D(G(z)): 0.3243 / 0.3584\n",
"[93/100][365/391] Loss_D: 2.6590 Loss_G: 2.6864 D(x): 0.6608 D(G(z)): 0.3555 / 0.4284\n",
"[93/100][366/391] Loss_D: 2.7032 Loss_G: 2.9581 D(x): 0.7261 D(G(z)): 0.4506 / 0.4042\n",
"[93/100][367/391] Loss_D: 2.8171 Loss_G: 2.4284 D(x): 0.6292 D(G(z)): 0.3338 / 0.4711\n",
"[93/100][368/391] Loss_D: 3.3929 Loss_G: 2.6647 D(x): 0.5365 D(G(z)): 0.3816 / 0.4470\n",
"[93/100][369/391] Loss_D: 2.8514 Loss_G: 2.5140 D(x): 0.7386 D(G(z)): 0.5252 / 0.4654\n",
"[93/100][370/391] Loss_D: 3.3277 Loss_G: 3.2130 D(x): 0.7148 D(G(z)): 0.5965 / 0.3770\n",
"[93/100][371/391] Loss_D: 3.2180 Loss_G: 3.1626 D(x): 0.6978 D(G(z)): 0.5273 / 0.3787\n",
"[93/100][372/391] Loss_D: 2.5222 Loss_G: 2.5747 D(x): 0.6263 D(G(z)): 0.3140 / 0.4616\n",
"[93/100][373/391] Loss_D: 2.4942 Loss_G: 2.8460 D(x): 0.7355 D(G(z)): 0.3523 / 0.4297\n",
"[93/100][374/391] Loss_D: 4.0983 Loss_G: 2.5914 D(x): 0.4823 D(G(z)): 0.4807 / 0.4642\n",
"[93/100][375/391] Loss_D: 3.2034 Loss_G: 3.6968 D(x): 0.7441 D(G(z)): 0.5555 / 0.3205\n",
"[93/100][376/391] Loss_D: 2.6311 Loss_G: 2.2249 D(x): 0.7009 D(G(z)): 0.3402 / 0.5120\n",
"[93/100][377/391] Loss_D: 3.3307 Loss_G: 2.2584 D(x): 0.5780 D(G(z)): 0.4389 / 0.5000\n",
"[93/100][378/391] Loss_D: 3.2571 Loss_G: 2.6161 D(x): 0.5506 D(G(z)): 0.4544 / 0.4539\n",
"[93/100][379/391] Loss_D: 2.8926 Loss_G: 2.4662 D(x): 0.6235 D(G(z)): 0.4054 / 0.4634\n",
"[93/100][380/391] Loss_D: 2.8745 Loss_G: 2.2213 D(x): 0.6450 D(G(z)): 0.4281 / 0.5139\n",
"[93/100][381/391] Loss_D: 2.9958 Loss_G: 3.1780 D(x): 0.6773 D(G(z)): 0.4842 / 0.3895\n",
"[93/100][382/391] Loss_D: 2.9941 Loss_G: 2.5939 D(x): 0.7075 D(G(z)): 0.4953 / 0.4292\n",
"[93/100][383/391] Loss_D: 3.2978 Loss_G: 3.5685 D(x): 0.7338 D(G(z)): 0.5745 / 0.3378\n",
"[93/100][384/391] Loss_D: 2.3247 Loss_G: 2.6776 D(x): 0.6616 D(G(z)): 0.3518 / 0.4420\n",
"[93/100][385/391] Loss_D: 2.5708 Loss_G: 2.9896 D(x): 0.7490 D(G(z)): 0.4255 / 0.3868\n",
"[93/100][386/391] Loss_D: 3.1945 Loss_G: 2.8822 D(x): 0.5656 D(G(z)): 0.3808 / 0.4040\n",
"[93/100][387/391] Loss_D: 3.1697 Loss_G: 2.9116 D(x): 0.6375 D(G(z)): 0.4866 / 0.4123\n",
"[93/100][388/391] Loss_D: 2.5764 Loss_G: 2.2085 D(x): 0.6496 D(G(z)): 0.3743 / 0.5002\n",
"[93/100][389/391] Loss_D: 2.7954 Loss_G: 2.3139 D(x): 0.6752 D(G(z)): 0.3590 / 0.4932\n",
"[93/100][390/391] Loss_D: 2.4327 Loss_G: 2.8607 D(x): 0.7833 D(G(z)): 0.4423 / 0.4306\n",
"[93/100][391/391] Loss_D: 3.7485 Loss_G: 4.1181 D(x): 0.6308 D(G(z)): 0.4227 / 0.2814\n",
"[94/100][1/391] Loss_D: 3.6220 Loss_G: 2.8179 D(x): 0.7093 D(G(z)): 0.3798 / 0.4233\n",
"[94/100][2/391] Loss_D: 2.9190 Loss_G: 3.1071 D(x): 0.6538 D(G(z)): 0.4564 / 0.3981\n",
"[94/100][3/391] Loss_D: 3.2908 Loss_G: 2.9655 D(x): 0.6006 D(G(z)): 0.4626 / 0.4077\n",
"[94/100][4/391] Loss_D: 2.7040 Loss_G: 2.9345 D(x): 0.6846 D(G(z)): 0.4280 / 0.3953\n",
"[94/100][5/391] Loss_D: 2.8586 Loss_G: 3.0043 D(x): 0.6204 D(G(z)): 0.3433 / 0.3959\n",
"[94/100][6/391] Loss_D: 3.0363 Loss_G: 3.6519 D(x): 0.6010 D(G(z)): 0.3797 / 0.3372\n",
"[94/100][7/391] Loss_D: 3.0945 Loss_G: 3.2814 D(x): 0.6899 D(G(z)): 0.4640 / 0.3725\n",
"[94/100][8/391] Loss_D: 2.3110 Loss_G: 3.4023 D(x): 0.6912 D(G(z)): 0.3180 / 0.3656\n",
"[94/100][9/391] Loss_D: 2.7627 Loss_G: 2.6404 D(x): 0.6646 D(G(z)): 0.4164 / 0.4444\n",
"[94/100][10/391] Loss_D: 2.6539 Loss_G: 1.9922 D(x): 0.7179 D(G(z)): 0.4452 / 0.5508\n",
"[94/100][11/391] Loss_D: 3.1170 Loss_G: 2.8082 D(x): 0.7100 D(G(z)): 0.5146 / 0.4154\n",
"[94/100][12/391] Loss_D: 2.9845 Loss_G: 3.2952 D(x): 0.7055 D(G(z)): 0.5025 / 0.3629\n",
"[94/100][13/391] Loss_D: 2.5984 Loss_G: 2.7907 D(x): 0.6760 D(G(z)): 0.3721 / 0.4189\n",
"[94/100][14/391] Loss_D: 2.4388 Loss_G: 2.6341 D(x): 0.6700 D(G(z)): 0.3592 / 0.4426\n",
"[94/100][15/391] Loss_D: 2.6995 Loss_G: 2.4941 D(x): 0.6649 D(G(z)): 0.4227 / 0.4559\n",
"[94/100][16/391] Loss_D: 3.2743 Loss_G: 2.7590 D(x): 0.6737 D(G(z)): 0.4880 / 0.4319\n",
"[94/100][17/391] Loss_D: 2.8196 Loss_G: 2.9088 D(x): 0.6105 D(G(z)): 0.3464 / 0.4132\n",
"[94/100][18/391] Loss_D: 2.9048 Loss_G: 2.7661 D(x): 0.6393 D(G(z)): 0.4093 / 0.4211\n",
"[94/100][19/391] Loss_D: 2.9794 Loss_G: 3.0601 D(x): 0.6261 D(G(z)): 0.4007 / 0.4087\n",
"[94/100][20/391] Loss_D: 2.5676 Loss_G: 2.7404 D(x): 0.6646 D(G(z)): 0.3540 / 0.4366\n",
"[94/100][21/391] Loss_D: 2.3370 Loss_G: 2.7407 D(x): 0.7308 D(G(z)): 0.3737 / 0.4356\n",
"[94/100][22/391] Loss_D: 2.7446 Loss_G: 3.0500 D(x): 0.7074 D(G(z)): 0.4179 / 0.3870\n",
"[94/100][23/391] Loss_D: 2.6985 Loss_G: 2.3191 D(x): 0.7026 D(G(z)): 0.4625 / 0.4897\n",
"[94/100][24/391] Loss_D: 3.9271 Loss_G: 1.6852 D(x): 0.5627 D(G(z)): 0.5396 / 0.5799\n",
"[94/100][25/391] Loss_D: 2.5710 Loss_G: 2.6207 D(x): 0.7696 D(G(z)): 0.4122 / 0.4256\n",
"[94/100][26/391] Loss_D: 2.8686 Loss_G: 2.6085 D(x): 0.6957 D(G(z)): 0.4585 / 0.4350\n",
"[94/100][27/391] Loss_D: 3.5968 Loss_G: 2.8189 D(x): 0.5843 D(G(z)): 0.4874 / 0.4080\n",
"[94/100][28/391] Loss_D: 2.8609 Loss_G: 2.6910 D(x): 0.6203 D(G(z)): 0.4332 / 0.4326\n",
"[94/100][29/391] Loss_D: 2.9987 Loss_G: 3.2787 D(x): 0.6972 D(G(z)): 0.4921 / 0.3683\n",
"[94/100][30/391] Loss_D: 2.9097 Loss_G: 3.3314 D(x): 0.7062 D(G(z)): 0.4912 / 0.3590\n",
"[94/100][31/391] Loss_D: 3.7532 Loss_G: 2.9376 D(x): 0.6214 D(G(z)): 0.3200 / 0.4088\n",
"[94/100][32/391] Loss_D: 3.0076 Loss_G: 2.9973 D(x): 0.6026 D(G(z)): 0.4127 / 0.3952\n",
"[94/100][33/391] Loss_D: 3.1708 Loss_G: 3.7237 D(x): 0.5950 D(G(z)): 0.3989 / 0.3160\n",
"[94/100][34/391] Loss_D: 2.6496 Loss_G: 2.9283 D(x): 0.6759 D(G(z)): 0.4187 / 0.3999\n",
"[94/100][35/391] Loss_D: 3.1501 Loss_G: 2.5222 D(x): 0.7379 D(G(z)): 0.5583 / 0.4489\n",
"[94/100][36/391] Loss_D: 3.4606 Loss_G: 2.3980 D(x): 0.5525 D(G(z)): 0.4400 / 0.4766\n",
"[94/100][37/391] Loss_D: 2.8310 Loss_G: 3.4991 D(x): 0.6629 D(G(z)): 0.4444 / 0.3372\n",
"[94/100][38/391] Loss_D: 2.8925 Loss_G: 2.6472 D(x): 0.6015 D(G(z)): 0.3673 / 0.4472\n",
"[94/100][39/391] Loss_D: 2.9728 Loss_G: 3.0067 D(x): 0.7292 D(G(z)): 0.5072 / 0.3829\n",
"[94/100][40/391] Loss_D: 2.6361 Loss_G: 2.7920 D(x): 0.7315 D(G(z)): 0.3817 / 0.4293\n",
"[94/100][41/391] Loss_D: 3.3372 Loss_G: 2.3355 D(x): 0.6592 D(G(z)): 0.5095 / 0.4815\n",
"[94/100][42/391] Loss_D: 3.2464 Loss_G: 2.2729 D(x): 0.5178 D(G(z)): 0.3755 / 0.5013\n",
"[94/100][43/391] Loss_D: 2.9833 Loss_G: 2.9828 D(x): 0.6623 D(G(z)): 0.4220 / 0.4177\n",
"[94/100][44/391] Loss_D: 2.6834 Loss_G: 2.7066 D(x): 0.6659 D(G(z)): 0.4048 / 0.4390\n",
"[94/100][45/391] Loss_D: 2.7241 Loss_G: 1.7954 D(x): 0.7364 D(G(z)): 0.4237 / 0.5754\n",
"[94/100][46/391] Loss_D: 2.6887 Loss_G: 2.8009 D(x): 0.6565 D(G(z)): 0.3434 / 0.4138\n",
"[94/100][47/391] Loss_D: 2.8689 Loss_G: 2.2494 D(x): 0.6413 D(G(z)): 0.4026 / 0.5100\n",
"[94/100][48/391] Loss_D: 3.1477 Loss_G: 2.0480 D(x): 0.6332 D(G(z)): 0.4838 / 0.5488\n",
"[94/100][49/391] Loss_D: 2.4464 Loss_G: 2.4381 D(x): 0.7240 D(G(z)): 0.4315 / 0.4631\n",
"[94/100][50/391] Loss_D: 2.7574 Loss_G: 2.7998 D(x): 0.6449 D(G(z)): 0.4129 / 0.4206\n",
"[94/100][51/391] Loss_D: 2.9080 Loss_G: 2.2340 D(x): 0.6309 D(G(z)): 0.3975 / 0.4968\n",
"[94/100][52/391] Loss_D: 2.7493 Loss_G: 2.4165 D(x): 0.7136 D(G(z)): 0.4293 / 0.4872\n",
"[94/100][53/391] Loss_D: 2.8252 Loss_G: 2.4477 D(x): 0.7308 D(G(z)): 0.4689 / 0.4789\n",
"[94/100][54/391] Loss_D: 2.9029 Loss_G: 2.4376 D(x): 0.6254 D(G(z)): 0.4023 / 0.4744\n",
"[94/100][55/391] Loss_D: 2.6728 Loss_G: 2.8296 D(x): 0.6850 D(G(z)): 0.4038 / 0.4095\n",
"[94/100][56/391] Loss_D: 3.1401 Loss_G: 3.2091 D(x): 0.7325 D(G(z)): 0.5175 / 0.3702\n",
"[94/100][57/391] Loss_D: 2.6131 Loss_G: 2.7591 D(x): 0.7538 D(G(z)): 0.3952 / 0.4279\n",
"[94/100][58/391] Loss_D: 3.0306 Loss_G: 3.7329 D(x): 0.5958 D(G(z)): 0.4153 / 0.3079\n",
"[94/100][59/391] Loss_D: 3.0472 Loss_G: 2.3418 D(x): 0.6369 D(G(z)): 0.4711 / 0.4928\n",
"[94/100][60/391] Loss_D: 2.9912 Loss_G: 3.5195 D(x): 0.6419 D(G(z)): 0.4466 / 0.3563\n",
"[94/100][61/391] Loss_D: 3.5756 Loss_G: 3.0314 D(x): 0.7220 D(G(z)): 0.3840 / 0.4082\n",
"[94/100][62/391] Loss_D: 2.4160 Loss_G: 3.6689 D(x): 0.7656 D(G(z)): 0.4243 / 0.3247\n",
"[94/100][63/391] Loss_D: 3.2324 Loss_G: 3.0167 D(x): 0.5939 D(G(z)): 0.4491 / 0.3898\n",
"[94/100][64/391] Loss_D: 2.6244 Loss_G: 3.6085 D(x): 0.7079 D(G(z)): 0.4583 / 0.3405\n",
"[94/100][65/391] Loss_D: 2.6090 Loss_G: 3.4482 D(x): 0.6646 D(G(z)): 0.3911 / 0.3524\n",
"[94/100][66/391] Loss_D: 2.8129 Loss_G: 3.2742 D(x): 0.5622 D(G(z)): 0.2867 / 0.3769\n",
"[94/100][67/391] Loss_D: 3.5271 Loss_G: 2.1915 D(x): 0.5113 D(G(z)): 0.3869 / 0.4888\n",
"[94/100][68/391] Loss_D: 2.5635 Loss_G: 2.1326 D(x): 0.6679 D(G(z)): 0.3767 / 0.5063\n",
"[94/100][69/391] Loss_D: 2.9456 Loss_G: 1.8388 D(x): 0.6760 D(G(z)): 0.4655 / 0.5610\n",
"[94/100][70/391] Loss_D: 2.5555 Loss_G: 2.8610 D(x): 0.7150 D(G(z)): 0.4105 / 0.4098\n",
"[94/100][71/391] Loss_D: 3.8176 Loss_G: 2.3844 D(x): 0.7322 D(G(z)): 0.6463 / 0.5039\n",
"[94/100][72/391] Loss_D: 2.7917 Loss_G: 2.5845 D(x): 0.6625 D(G(z)): 0.4410 / 0.4524\n",
"[94/100][73/391] Loss_D: 2.7018 Loss_G: 3.1010 D(x): 0.6738 D(G(z)): 0.4086 / 0.3861\n",
"[94/100][74/391] Loss_D: 3.0651 Loss_G: 2.3803 D(x): 0.6858 D(G(z)): 0.5042 / 0.4981\n",
"[94/100][75/391] Loss_D: 2.7011 Loss_G: 3.7981 D(x): 0.6789 D(G(z)): 0.4283 / 0.3121\n",
"[94/100][76/391] Loss_D: 3.1539 Loss_G: 3.5886 D(x): 0.5835 D(G(z)): 0.4526 / 0.3395\n",
"[94/100][77/391] Loss_D: 3.6412 Loss_G: 2.6081 D(x): 0.5997 D(G(z)): 0.5343 / 0.4565\n",
"[94/100][78/391] Loss_D: 3.3215 Loss_G: 2.9446 D(x): 0.5325 D(G(z)): 0.4012 / 0.4184\n",
"[94/100][79/391] Loss_D: 2.2060 Loss_G: 2.6984 D(x): 0.7317 D(G(z)): 0.3093 / 0.4419\n",
"[94/100][80/391] Loss_D: 3.1120 Loss_G: 3.0521 D(x): 0.6701 D(G(z)): 0.4851 / 0.4053\n",
"[94/100][81/391] Loss_D: 2.8376 Loss_G: 2.4960 D(x): 0.6927 D(G(z)): 0.4603 / 0.4834\n",
"[94/100][82/391] Loss_D: 3.0194 Loss_G: 1.9693 D(x): 0.6309 D(G(z)): 0.4161 / 0.5448\n",
"[94/100][83/391] Loss_D: 2.6425 Loss_G: 2.7273 D(x): 0.7221 D(G(z)): 0.4168 / 0.4433\n",
"[94/100][84/391] Loss_D: 2.3261 Loss_G: 2.8283 D(x): 0.7420 D(G(z)): 0.4088 / 0.4273\n",
"[94/100][85/391] Loss_D: 2.4533 Loss_G: 4.2635 D(x): 0.6934 D(G(z)): 0.3148 / 0.2758\n",
"[94/100][86/391] Loss_D: 3.2015 Loss_G: 2.7660 D(x): 0.6328 D(G(z)): 0.4709 / 0.4017\n",
"[94/100][87/391] Loss_D: 3.2383 Loss_G: 2.6016 D(x): 0.6426 D(G(z)): 0.4679 / 0.4480\n",
"[94/100][88/391] Loss_D: 2.8730 Loss_G: 3.0196 D(x): 0.5917 D(G(z)): 0.3829 / 0.4001\n",
"[94/100][89/391] Loss_D: 2.6479 Loss_G: 3.0473 D(x): 0.7537 D(G(z)): 0.4605 / 0.3796\n",
"[94/100][90/391] Loss_D: 2.8209 Loss_G: 2.8820 D(x): 0.6738 D(G(z)): 0.4450 / 0.4193\n",
"[94/100][91/391] Loss_D: 3.6104 Loss_G: 3.1727 D(x): 0.7445 D(G(z)): 0.4491 / 0.3752\n",
"[94/100][92/391] Loss_D: 2.5839 Loss_G: 3.5203 D(x): 0.6403 D(G(z)): 0.3444 / 0.3300\n",
"[94/100][93/391] Loss_D: 3.4313 Loss_G: 2.4095 D(x): 0.5477 D(G(z)): 0.4273 / 0.4715\n",
"[94/100][94/391] Loss_D: 2.5616 Loss_G: 2.3773 D(x): 0.6677 D(G(z)): 0.4155 / 0.4866\n",
"[94/100][95/391] Loss_D: 3.1716 Loss_G: 2.5345 D(x): 0.6404 D(G(z)): 0.4697 / 0.4511\n",
"[94/100][96/391] Loss_D: 3.4975 Loss_G: 3.7430 D(x): 0.6013 D(G(z)): 0.5225 / 0.3065\n",
"[94/100][97/391] Loss_D: 3.2393 Loss_G: 2.7928 D(x): 0.5926 D(G(z)): 0.4172 / 0.4166\n",
"[94/100][98/391] Loss_D: 2.7426 Loss_G: 3.1953 D(x): 0.7124 D(G(z)): 0.4595 / 0.4007\n",
"[94/100][99/391] Loss_D: 3.4501 Loss_G: 2.7600 D(x): 0.6817 D(G(z)): 0.5665 / 0.4289\n",
"[94/100][100/391] Loss_D: 2.9312 Loss_G: 2.3228 D(x): 0.6487 D(G(z)): 0.4465 / 0.5021\n",
"[94/100][101/391] Loss_D: 3.4066 Loss_G: 2.6071 D(x): 0.5548 D(G(z)): 0.4462 / 0.4536\n",
"[94/100][102/391] Loss_D: 2.8060 Loss_G: 3.2236 D(x): 0.7721 D(G(z)): 0.4539 / 0.3706\n",
"[94/100][103/391] Loss_D: 2.7134 Loss_G: 2.9489 D(x): 0.7159 D(G(z)): 0.4190 / 0.4087\n",
"[94/100][104/391] Loss_D: 2.3871 Loss_G: 3.2302 D(x): 0.7891 D(G(z)): 0.4451 / 0.3606\n",
"[94/100][105/391] Loss_D: 2.3937 Loss_G: 2.8839 D(x): 0.7289 D(G(z)): 0.3119 / 0.4119\n",
"[94/100][106/391] Loss_D: 2.8206 Loss_G: 2.7100 D(x): 0.5909 D(G(z)): 0.3426 / 0.4380\n",
"[94/100][107/391] Loss_D: 2.9266 Loss_G: 2.9689 D(x): 0.6562 D(G(z)): 0.4020 / 0.4037\n",
"[94/100][108/391] Loss_D: 2.6416 Loss_G: 2.0331 D(x): 0.6499 D(G(z)): 0.2579 / 0.5421\n",
"[94/100][109/391] Loss_D: 2.7765 Loss_G: 2.6755 D(x): 0.6733 D(G(z)): 0.4555 / 0.4504\n",
"[94/100][110/391] Loss_D: 3.0245 Loss_G: 2.5057 D(x): 0.6641 D(G(z)): 0.4534 / 0.4755\n",
"[94/100][111/391] Loss_D: 2.7207 Loss_G: 3.0538 D(x): 0.7152 D(G(z)): 0.4204 / 0.3966\n",
"[94/100][112/391] Loss_D: 3.5615 Loss_G: 3.3838 D(x): 0.6393 D(G(z)): 0.5674 / 0.3494\n",
"[94/100][113/391] Loss_D: 2.5172 Loss_G: 3.1589 D(x): 0.7430 D(G(z)): 0.4079 / 0.3766\n",
"[94/100][114/391] Loss_D: 2.3620 Loss_G: 2.8602 D(x): 0.7000 D(G(z)): 0.3882 / 0.4178\n",
"[94/100][115/391] Loss_D: 3.8516 Loss_G: 2.9398 D(x): 0.5541 D(G(z)): 0.5449 / 0.3954\n",
"[94/100][116/391] Loss_D: 2.6225 Loss_G: 3.4302 D(x): 0.6554 D(G(z)): 0.3365 / 0.3526\n",
"[94/100][117/391] Loss_D: 2.9585 Loss_G: 3.3509 D(x): 0.5923 D(G(z)): 0.3480 / 0.3598\n",
"[94/100][118/391] Loss_D: 2.6110 Loss_G: 2.1644 D(x): 0.6552 D(G(z)): 0.3542 / 0.5070\n",
"[94/100][119/391] Loss_D: 2.6155 Loss_G: 3.0602 D(x): 0.7046 D(G(z)): 0.4093 / 0.3914\n",
"[94/100][120/391] Loss_D: 3.2663 Loss_G: 2.6071 D(x): 0.6421 D(G(z)): 0.5126 / 0.4606\n",
"[94/100][121/391] Loss_D: 3.6212 Loss_G: 2.7085 D(x): 0.6672 D(G(z)): 0.4178 / 0.4374\n",
"[94/100][122/391] Loss_D: 2.8435 Loss_G: 3.2602 D(x): 0.7724 D(G(z)): 0.5122 / 0.3574\n",
"[94/100][123/391] Loss_D: 2.6058 Loss_G: 3.2226 D(x): 0.6824 D(G(z)): 0.3855 / 0.3710\n",
"[94/100][124/391] Loss_D: 3.1328 Loss_G: 3.1588 D(x): 0.6203 D(G(z)): 0.4577 / 0.3866\n",
"[94/100][125/391] Loss_D: 3.4817 Loss_G: 2.6525 D(x): 0.5825 D(G(z)): 0.4247 / 0.4394\n",
"[94/100][126/391] Loss_D: 2.5897 Loss_G: 3.0571 D(x): 0.6708 D(G(z)): 0.3931 / 0.3945\n",
"[94/100][127/391] Loss_D: 2.7442 Loss_G: 2.6123 D(x): 0.6787 D(G(z)): 0.3452 / 0.4324\n",
"[94/100][128/391] Loss_D: 3.0503 Loss_G: 3.0621 D(x): 0.6249 D(G(z)): 0.5007 / 0.3950\n",
"[94/100][129/391] Loss_D: 2.8915 Loss_G: 3.0112 D(x): 0.7137 D(G(z)): 0.5036 / 0.3989\n",
"[94/100][130/391] Loss_D: 2.9499 Loss_G: 1.9811 D(x): 0.7219 D(G(z)): 0.4714 / 0.5528\n",
"[94/100][131/391] Loss_D: 2.9607 Loss_G: 3.3205 D(x): 0.7466 D(G(z)): 0.5207 / 0.3656\n",
"[94/100][132/391] Loss_D: 2.3739 Loss_G: 3.0987 D(x): 0.7515 D(G(z)): 0.3637 / 0.3834\n",
"[94/100][133/391] Loss_D: 2.7053 Loss_G: 3.7733 D(x): 0.6501 D(G(z)): 0.3962 / 0.3216\n",
"[94/100][134/391] Loss_D: 2.5753 Loss_G: 3.4409 D(x): 0.6221 D(G(z)): 0.3551 / 0.3489\n",
"[94/100][135/391] Loss_D: 3.3711 Loss_G: 2.5887 D(x): 0.4798 D(G(z)): 0.3226 / 0.4407\n",
"[94/100][136/391] Loss_D: 2.5530 Loss_G: 2.9244 D(x): 0.7207 D(G(z)): 0.3725 / 0.4046\n",
"[94/100][137/391] Loss_D: 3.0342 Loss_G: 2.3236 D(x): 0.6159 D(G(z)): 0.4078 / 0.4692\n",
"[94/100][138/391] Loss_D: 2.7512 Loss_G: 2.5900 D(x): 0.6417 D(G(z)): 0.3994 / 0.4577\n",
"[94/100][139/391] Loss_D: 3.7042 Loss_G: 2.1023 D(x): 0.5837 D(G(z)): 0.5245 / 0.5310\n",
"[94/100][140/391] Loss_D: 2.4809 Loss_G: 2.4601 D(x): 0.7373 D(G(z)): 0.3788 / 0.4788\n",
"[94/100][141/391] Loss_D: 3.0812 Loss_G: 2.9740 D(x): 0.7423 D(G(z)): 0.5249 / 0.4059\n",
"[94/100][142/391] Loss_D: 2.9335 Loss_G: 2.1398 D(x): 0.6759 D(G(z)): 0.4846 / 0.5194\n",
"[94/100][143/391] Loss_D: 2.6141 Loss_G: 2.6196 D(x): 0.7576 D(G(z)): 0.4498 / 0.4495\n",
"[94/100][144/391] Loss_D: 2.4636 Loss_G: 3.3040 D(x): 0.6936 D(G(z)): 0.4004 / 0.3652\n",
"[94/100][145/391] Loss_D: 3.3778 Loss_G: 3.6485 D(x): 0.6112 D(G(z)): 0.4867 / 0.3175\n",
"[94/100][146/391] Loss_D: 3.1169 Loss_G: 2.5459 D(x): 0.5574 D(G(z)): 0.3547 / 0.4671\n",
"[94/100][147/391] Loss_D: 2.8718 Loss_G: 2.1662 D(x): 0.7027 D(G(z)): 0.4404 / 0.5069\n",
"[94/100][148/391] Loss_D: 2.2605 Loss_G: 3.4163 D(x): 0.8008 D(G(z)): 0.4688 / 0.3508\n",
"[94/100][149/391] Loss_D: 2.5143 Loss_G: 2.1253 D(x): 0.7423 D(G(z)): 0.4137 / 0.5374\n",
"[94/100][150/391] Loss_D: 2.7695 Loss_G: 3.1864 D(x): 0.6506 D(G(z)): 0.3675 / 0.3946\n",
"[94/100][151/391] Loss_D: 3.5084 Loss_G: 2.9147 D(x): 0.6857 D(G(z)): 0.3513 / 0.4028\n",
"[94/100][152/391] Loss_D: 2.5573 Loss_G: 3.3363 D(x): 0.6604 D(G(z)): 0.3729 / 0.3621\n",
"[94/100][153/391] Loss_D: 2.4976 Loss_G: 2.9073 D(x): 0.7060 D(G(z)): 0.3439 / 0.4071\n",
"[94/100][154/391] Loss_D: 2.4884 Loss_G: 2.8165 D(x): 0.7475 D(G(z)): 0.4533 / 0.4159\n",
"[94/100][155/391] Loss_D: 3.0534 Loss_G: 3.3297 D(x): 0.6649 D(G(z)): 0.4258 / 0.3476\n",
"[94/100][156/391] Loss_D: 3.0132 Loss_G: 2.6870 D(x): 0.6492 D(G(z)): 0.4474 / 0.4448\n",
"[94/100][157/391] Loss_D: 2.9578 Loss_G: 3.7600 D(x): 0.6923 D(G(z)): 0.4675 / 0.3286\n",
"[94/100][158/391] Loss_D: 3.7139 Loss_G: 2.7014 D(x): 0.4654 D(G(z)): 0.3479 / 0.4439\n",
"[94/100][159/391] Loss_D: 3.1909 Loss_G: 2.5028 D(x): 0.6094 D(G(z)): 0.4274 / 0.4687\n",
"[94/100][160/391] Loss_D: 3.8127 Loss_G: 2.0550 D(x): 0.6062 D(G(z)): 0.5785 / 0.5319\n",
"[94/100][161/391] Loss_D: 3.1324 Loss_G: 2.1650 D(x): 0.7005 D(G(z)): 0.5032 / 0.5179\n",
"[94/100][162/391] Loss_D: 2.5249 Loss_G: 2.8583 D(x): 0.6967 D(G(z)): 0.3932 / 0.4156\n",
"[94/100][163/391] Loss_D: 2.5188 Loss_G: 2.8026 D(x): 0.7119 D(G(z)): 0.3695 / 0.4122\n",
"[94/100][164/391] Loss_D: 1.9719 Loss_G: 2.2609 D(x): 0.7865 D(G(z)): 0.3221 / 0.5011\n",
"[94/100][165/391] Loss_D: 2.4971 Loss_G: 2.7383 D(x): 0.7866 D(G(z)): 0.4310 / 0.4251\n",
"[94/100][166/391] Loss_D: 2.5353 Loss_G: 2.5523 D(x): 0.7446 D(G(z)): 0.4197 / 0.4425\n",
"[94/100][167/391] Loss_D: 2.8829 Loss_G: 2.4970 D(x): 0.6008 D(G(z)): 0.3144 / 0.4586\n",
"[94/100][168/391] Loss_D: 2.7658 Loss_G: 2.6158 D(x): 0.6352 D(G(z)): 0.3139 / 0.4483\n",
"[94/100][169/391] Loss_D: 2.6604 Loss_G: 2.8775 D(x): 0.7212 D(G(z)): 0.4467 / 0.4064\n",
"[94/100][170/391] Loss_D: 2.8750 Loss_G: 2.8050 D(x): 0.6770 D(G(z)): 0.4444 / 0.4346\n",
"[94/100][171/391] Loss_D: 3.3964 Loss_G: 2.9888 D(x): 0.5978 D(G(z)): 0.4550 / 0.4013\n",
"[94/100][172/391] Loss_D: 3.5273 Loss_G: 2.6711 D(x): 0.5642 D(G(z)): 0.4934 / 0.4402\n",
"[94/100][173/391] Loss_D: 2.4442 Loss_G: 3.3793 D(x): 0.7089 D(G(z)): 0.3601 / 0.3563\n",
"[94/100][174/391] Loss_D: 2.0460 Loss_G: 2.8820 D(x): 0.7452 D(G(z)): 0.3268 / 0.4237\n",
"[94/100][175/391] Loss_D: 2.9180 Loss_G: 2.8568 D(x): 0.7145 D(G(z)): 0.4896 / 0.4152\n",
"[94/100][176/391] Loss_D: 2.8531 Loss_G: 2.5417 D(x): 0.6761 D(G(z)): 0.4107 / 0.4665\n",
"[94/100][177/391] Loss_D: 2.7759 Loss_G: 2.7357 D(x): 0.7344 D(G(z)): 0.4362 / 0.4269\n",
"[94/100][178/391] Loss_D: 2.1466 Loss_G: 2.9881 D(x): 0.7347 D(G(z)): 0.3522 / 0.4083\n",
"[94/100][179/391] Loss_D: 3.1305 Loss_G: 1.8723 D(x): 0.5654 D(G(z)): 0.3402 / 0.5609\n",
"[94/100][180/391] Loss_D: 2.9324 Loss_G: 2.5250 D(x): 0.6788 D(G(z)): 0.4827 / 0.4582\n",
"[94/100][181/391] Loss_D: 3.5397 Loss_G: 2.6166 D(x): 0.6334 D(G(z)): 0.4073 / 0.4473\n",
"[94/100][182/391] Loss_D: 3.6431 Loss_G: 2.5219 D(x): 0.5124 D(G(z)): 0.4719 / 0.4628\n",
"[94/100][183/391] Loss_D: 2.8173 Loss_G: 2.3976 D(x): 0.6833 D(G(z)): 0.4625 / 0.4810\n",
"[94/100][184/391] Loss_D: 2.9451 Loss_G: 2.7135 D(x): 0.7395 D(G(z)): 0.5228 / 0.4436\n",
"[94/100][185/391] Loss_D: 2.8822 Loss_G: 3.4005 D(x): 0.6811 D(G(z)): 0.4211 / 0.3457\n",
"[94/100][186/391] Loss_D: 2.7806 Loss_G: 3.6583 D(x): 0.6471 D(G(z)): 0.4093 / 0.3342\n",
"[94/100][187/391] Loss_D: 2.8691 Loss_G: 2.8780 D(x): 0.6683 D(G(z)): 0.4730 / 0.4176\n",
"[94/100][188/391] Loss_D: 2.5635 Loss_G: 2.5238 D(x): 0.6745 D(G(z)): 0.4050 / 0.4538\n",
"[94/100][189/391] Loss_D: 2.9398 Loss_G: 3.0881 D(x): 0.7145 D(G(z)): 0.4642 / 0.3828\n",
"[94/100][190/391] Loss_D: 2.6312 Loss_G: 2.9771 D(x): 0.7246 D(G(z)): 0.4328 / 0.4037\n",
"[94/100][191/391] Loss_D: 3.1666 Loss_G: 2.7253 D(x): 0.6049 D(G(z)): 0.4167 / 0.4303\n",
"[94/100][192/391] Loss_D: 2.9057 Loss_G: 3.3863 D(x): 0.7292 D(G(z)): 0.5003 / 0.3571\n",
"[94/100][193/391] Loss_D: 2.6008 Loss_G: 4.0107 D(x): 0.6360 D(G(z)): 0.3481 / 0.2982\n",
"[94/100][194/391] Loss_D: 2.8467 Loss_G: 3.5247 D(x): 0.6357 D(G(z)): 0.4167 / 0.3323\n",
"[94/100][195/391] Loss_D: 2.6482 Loss_G: 2.5790 D(x): 0.6745 D(G(z)): 0.3677 / 0.4490\n",
"[94/100][196/391] Loss_D: 2.8590 Loss_G: 2.2764 D(x): 0.6548 D(G(z)): 0.4609 / 0.5026\n",
"[94/100][197/391] Loss_D: 2.7250 Loss_G: 2.9435 D(x): 0.6920 D(G(z)): 0.3372 / 0.3984\n",
"[94/100][198/391] Loss_D: 2.6201 Loss_G: 1.9088 D(x): 0.6870 D(G(z)): 0.3957 / 0.5520\n",
"[94/100][199/391] Loss_D: 2.8472 Loss_G: 2.0641 D(x): 0.6935 D(G(z)): 0.4671 / 0.5466\n",
"[94/100][200/391] Loss_D: 2.7035 Loss_G: 2.6917 D(x): 0.6243 D(G(z)): 0.3537 / 0.4392\n",
"[94/100][201/391] Loss_D: 2.5554 Loss_G: 2.6266 D(x): 0.7698 D(G(z)): 0.4281 / 0.4498\n",
"[94/100][202/391] Loss_D: 3.3516 Loss_G: 2.2083 D(x): 0.6459 D(G(z)): 0.5279 / 0.5160\n",
"[94/100][203/391] Loss_D: 2.6272 Loss_G: 2.8179 D(x): 0.6999 D(G(z)): 0.3740 / 0.4359\n",
"[94/100][204/391] Loss_D: 2.8342 Loss_G: 2.7728 D(x): 0.6280 D(G(z)): 0.4373 / 0.4336\n",
"[94/100][205/391] Loss_D: 2.6991 Loss_G: 2.2769 D(x): 0.6156 D(G(z)): 0.3610 / 0.4915\n",
"[94/100][206/391] Loss_D: 2.7704 Loss_G: 2.1734 D(x): 0.6746 D(G(z)): 0.4340 / 0.5126\n",
"[94/100][207/391] Loss_D: 2.9134 Loss_G: 3.1280 D(x): 0.5962 D(G(z)): 0.3937 / 0.3755\n",
"[94/100][208/391] Loss_D: 2.5703 Loss_G: 2.5324 D(x): 0.7102 D(G(z)): 0.4631 / 0.4527\n",
"[94/100][209/391] Loss_D: 2.7838 Loss_G: 2.7999 D(x): 0.6098 D(G(z)): 0.3503 / 0.4290\n",
"[94/100][210/391] Loss_D: 2.9211 Loss_G: 2.8467 D(x): 0.6332 D(G(z)): 0.3872 / 0.4377\n",
"[94/100][211/391] Loss_D: 3.6273 Loss_G: 2.3769 D(x): 0.6393 D(G(z)): 0.4700 / 0.4735\n",
"[94/100][212/391] Loss_D: 2.7655 Loss_G: 2.1441 D(x): 0.7343 D(G(z)): 0.4574 / 0.5333\n",
"[94/100][213/391] Loss_D: 3.0955 Loss_G: 2.2551 D(x): 0.7265 D(G(z)): 0.5183 / 0.5154\n",
"[94/100][214/391] Loss_D: 2.9064 Loss_G: 2.5792 D(x): 0.6600 D(G(z)): 0.4592 / 0.4509\n",
"[94/100][215/391] Loss_D: 3.0637 Loss_G: 2.1893 D(x): 0.5466 D(G(z)): 0.3489 / 0.4923\n",
"[94/100][216/391] Loss_D: 2.4957 Loss_G: 2.4298 D(x): 0.7037 D(G(z)): 0.3305 / 0.4738\n",
"[94/100][217/391] Loss_D: 3.3039 Loss_G: 2.2368 D(x): 0.6430 D(G(z)): 0.5279 / 0.4998\n",
"[94/100][218/391] Loss_D: 2.8684 Loss_G: 1.9777 D(x): 0.6511 D(G(z)): 0.4604 / 0.5489\n",
"[94/100][219/391] Loss_D: 3.3549 Loss_G: 2.8916 D(x): 0.6178 D(G(z)): 0.5217 / 0.4216\n",
"[94/100][220/391] Loss_D: 2.9491 Loss_G: 3.2550 D(x): 0.7486 D(G(z)): 0.5395 / 0.3863\n",
"[94/100][221/391] Loss_D: 2.7119 Loss_G: 3.7730 D(x): 0.6541 D(G(z)): 0.4322 / 0.3165\n",
"[94/100][222/391] Loss_D: 2.6406 Loss_G: 2.9982 D(x): 0.6177 D(G(z)): 0.3346 / 0.3996\n",
"[94/100][223/391] Loss_D: 2.5706 Loss_G: 3.0624 D(x): 0.6596 D(G(z)): 0.3216 / 0.3887\n",
"[94/100][224/391] Loss_D: 2.8426 Loss_G: 3.5748 D(x): 0.6617 D(G(z)): 0.4210 / 0.3240\n",
"[94/100][225/391] Loss_D: 2.6055 Loss_G: 2.6193 D(x): 0.6939 D(G(z)): 0.3730 / 0.4545\n",
"[94/100][226/391] Loss_D: 2.7018 Loss_G: 2.6638 D(x): 0.6920 D(G(z)): 0.4636 / 0.4441\n",
"[94/100][227/391] Loss_D: 2.5605 Loss_G: 3.2305 D(x): 0.6457 D(G(z)): 0.2691 / 0.3747\n",
"[94/100][228/391] Loss_D: 2.8721 Loss_G: 2.5936 D(x): 0.6894 D(G(z)): 0.4767 / 0.4564\n",
"[94/100][229/391] Loss_D: 3.8948 Loss_G: 3.1039 D(x): 0.5102 D(G(z)): 0.5051 / 0.3928\n",
"[94/100][230/391] Loss_D: 2.2765 Loss_G: 2.4258 D(x): 0.7168 D(G(z)): 0.3583 / 0.4775\n",
"[94/100][231/391] Loss_D: 2.9541 Loss_G: 1.5950 D(x): 0.7472 D(G(z)): 0.5056 / 0.6192\n",
"[94/100][232/391] Loss_D: 2.7080 Loss_G: 2.0607 D(x): 0.7732 D(G(z)): 0.4677 / 0.5230\n",
"[94/100][233/391] Loss_D: 2.5658 Loss_G: 2.6705 D(x): 0.6760 D(G(z)): 0.3900 / 0.4411\n",
"[94/100][234/391] Loss_D: 3.0518 Loss_G: 2.5637 D(x): 0.6233 D(G(z)): 0.4641 / 0.4494\n",
"[94/100][235/391] Loss_D: 2.9169 Loss_G: 2.7131 D(x): 0.6924 D(G(z)): 0.4751 / 0.4272\n",
"[94/100][236/391] Loss_D: 3.5040 Loss_G: 4.2606 D(x): 0.5860 D(G(z)): 0.4849 / 0.2660\n",
"[94/100][237/391] Loss_D: 3.0452 Loss_G: 2.4797 D(x): 0.6549 D(G(z)): 0.4961 / 0.4518\n",
"[94/100][238/391] Loss_D: 2.4058 Loss_G: 3.8025 D(x): 0.7719 D(G(z)): 0.4911 / 0.3141\n",
"[94/100][239/391] Loss_D: 2.8281 Loss_G: 2.9227 D(x): 0.5861 D(G(z)): 0.3105 / 0.4095\n",
"[94/100][240/391] Loss_D: 3.2215 Loss_G: 3.9685 D(x): 0.5745 D(G(z)): 0.4241 / 0.3089\n",
"[94/100][241/391] Loss_D: 3.8306 Loss_G: 2.4344 D(x): 0.6329 D(G(z)): 0.5184 / 0.4807\n",
"[94/100][242/391] Loss_D: 3.1013 Loss_G: 2.2159 D(x): 0.6741 D(G(z)): 0.5236 / 0.5000\n",
"[94/100][243/391] Loss_D: 2.7577 Loss_G: 1.8393 D(x): 0.6362 D(G(z)): 0.3982 / 0.5650\n",
"[94/100][244/391] Loss_D: 3.1038 Loss_G: 3.1013 D(x): 0.6350 D(G(z)): 0.4641 / 0.3967\n",
"[94/100][245/391] Loss_D: 3.2969 Loss_G: 2.9346 D(x): 0.5624 D(G(z)): 0.4739 / 0.4158\n",
"[94/100][246/391] Loss_D: 3.1025 Loss_G: 2.1373 D(x): 0.6709 D(G(z)): 0.4680 / 0.5123\n",
"[94/100][247/391] Loss_D: 2.8321 Loss_G: 2.7305 D(x): 0.6802 D(G(z)): 0.4200 / 0.4313\n",
"[94/100][248/391] Loss_D: 3.6557 Loss_G: 2.8870 D(x): 0.4681 D(G(z)): 0.3762 / 0.3981\n",
"[94/100][249/391] Loss_D: 2.6318 Loss_G: 2.7437 D(x): 0.6594 D(G(z)): 0.3948 / 0.4296\n",
"[94/100][250/391] Loss_D: 2.6006 Loss_G: 3.3298 D(x): 0.7404 D(G(z)): 0.3830 / 0.3584\n",
"[94/100][251/391] Loss_D: 3.0137 Loss_G: 2.7001 D(x): 0.6110 D(G(z)): 0.3755 / 0.4372\n",
"[94/100][252/391] Loss_D: 3.3602 Loss_G: 3.0141 D(x): 0.6556 D(G(z)): 0.5106 / 0.3967\n",
"[94/100][253/391] Loss_D: 2.6109 Loss_G: 3.0915 D(x): 0.7479 D(G(z)): 0.4516 / 0.3899\n",
"[94/100][254/391] Loss_D: 2.2521 Loss_G: 3.8057 D(x): 0.7604 D(G(z)): 0.4092 / 0.3109\n",
"[94/100][255/391] Loss_D: 3.3687 Loss_G: 2.3626 D(x): 0.6196 D(G(z)): 0.4934 / 0.4638\n",
"[94/100][256/391] Loss_D: 2.8153 Loss_G: 3.4781 D(x): 0.6273 D(G(z)): 0.4149 / 0.3412\n",
"[94/100][257/391] Loss_D: 2.9543 Loss_G: 2.6649 D(x): 0.6921 D(G(z)): 0.4272 / 0.4291\n",
"[94/100][258/391] Loss_D: 2.8315 Loss_G: 2.3687 D(x): 0.6320 D(G(z)): 0.3385 / 0.4828\n",
"[94/100][259/391] Loss_D: 3.1697 Loss_G: 2.5737 D(x): 0.5782 D(G(z)): 0.4192 / 0.4506\n",
"[94/100][260/391] Loss_D: 2.3588 Loss_G: 1.8161 D(x): 0.7491 D(G(z)): 0.3674 / 0.5694\n",
"[94/100][261/391] Loss_D: 3.1205 Loss_G: 2.1310 D(x): 0.7249 D(G(z)): 0.4952 / 0.5136\n",
"[94/100][262/391] Loss_D: 2.8757 Loss_G: 2.4193 D(x): 0.6092 D(G(z)): 0.3414 / 0.4561\n",
"[94/100][263/391] Loss_D: 3.2392 Loss_G: 2.5489 D(x): 0.6335 D(G(z)): 0.4803 / 0.4476\n",
"[94/100][264/391] Loss_D: 3.1776 Loss_G: 3.5578 D(x): 0.6615 D(G(z)): 0.5468 / 0.3533\n",
"[94/100][265/391] Loss_D: 3.0564 Loss_G: 3.0905 D(x): 0.6521 D(G(z)): 0.4663 / 0.3852\n",
"[94/100][266/391] Loss_D: 2.8940 Loss_G: 3.0190 D(x): 0.7348 D(G(z)): 0.4517 / 0.4158\n",
"[94/100][267/391] Loss_D: 2.9321 Loss_G: 2.5776 D(x): 0.6262 D(G(z)): 0.4023 / 0.4501\n",
"[94/100][268/391] Loss_D: 2.8176 Loss_G: 2.6257 D(x): 0.6094 D(G(z)): 0.4314 / 0.4511\n",
"[94/100][269/391] Loss_D: 2.8661 Loss_G: 2.6378 D(x): 0.6928 D(G(z)): 0.4343 / 0.4517\n",
"[94/100][270/391] Loss_D: 2.7209 Loss_G: 2.7752 D(x): 0.7005 D(G(z)): 0.3896 / 0.4297\n",
"[94/100][271/391] Loss_D: 3.8142 Loss_G: 2.9682 D(x): 0.6185 D(G(z)): 0.5256 / 0.4043\n",
"[94/100][272/391] Loss_D: 2.9586 Loss_G: 2.9346 D(x): 0.6302 D(G(z)): 0.4088 / 0.4150\n",
"[94/100][273/391] Loss_D: 2.8017 Loss_G: 2.8548 D(x): 0.7419 D(G(z)): 0.4899 / 0.4125\n",
"[94/100][274/391] Loss_D: 2.7707 Loss_G: 2.3025 D(x): 0.6525 D(G(z)): 0.4039 / 0.4894\n",
"[94/100][275/391] Loss_D: 2.5850 Loss_G: 2.9337 D(x): 0.6408 D(G(z)): 0.3050 / 0.4069\n",
"[94/100][276/391] Loss_D: 3.2731 Loss_G: 2.2447 D(x): 0.6079 D(G(z)): 0.4640 / 0.5056\n",
"[94/100][277/391] Loss_D: 2.4373 Loss_G: 3.4451 D(x): 0.7307 D(G(z)): 0.3387 / 0.3487\n",
"[94/100][278/391] Loss_D: 2.0684 Loss_G: 3.3882 D(x): 0.7648 D(G(z)): 0.3304 / 0.3552\n",
"[94/100][279/391] Loss_D: 2.7297 Loss_G: 3.0016 D(x): 0.8147 D(G(z)): 0.5421 / 0.4038\n",
"[94/100][280/391] Loss_D: 2.5211 Loss_G: 2.6375 D(x): 0.6702 D(G(z)): 0.3031 / 0.4512\n",
"[94/100][281/391] Loss_D: 3.4001 Loss_G: 2.0880 D(x): 0.6768 D(G(z)): 0.5485 / 0.5262\n",
"[94/100][282/391] Loss_D: 2.4476 Loss_G: 3.4108 D(x): 0.7139 D(G(z)): 0.3717 / 0.3726\n",
"[94/100][283/391] Loss_D: 3.1105 Loss_G: 3.4496 D(x): 0.5578 D(G(z)): 0.3651 / 0.3528\n",
"[94/100][284/391] Loss_D: 2.5473 Loss_G: 2.7574 D(x): 0.7510 D(G(z)): 0.4838 / 0.4281\n",
"[94/100][285/391] Loss_D: 2.5282 Loss_G: 3.4910 D(x): 0.7188 D(G(z)): 0.3597 / 0.3467\n",
"[94/100][286/391] Loss_D: 2.6902 Loss_G: 3.3758 D(x): 0.6907 D(G(z)): 0.4248 / 0.3556\n",
"[94/100][287/391] Loss_D: 2.7458 Loss_G: 2.7399 D(x): 0.6496 D(G(z)): 0.2779 / 0.4311\n",
"[94/100][288/391] Loss_D: 2.6650 Loss_G: 3.3689 D(x): 0.6945 D(G(z)): 0.4670 / 0.3731\n",
"[94/100][289/391] Loss_D: 2.8652 Loss_G: 2.5945 D(x): 0.6247 D(G(z)): 0.3197 / 0.4580\n",
"[94/100][290/391] Loss_D: 2.6539 Loss_G: 2.0537 D(x): 0.7194 D(G(z)): 0.4391 / 0.5442\n",
"[94/100][291/391] Loss_D: 3.0519 Loss_G: 2.2022 D(x): 0.6033 D(G(z)): 0.3771 / 0.5069\n",
"[94/100][292/391] Loss_D: 2.9025 Loss_G: 2.6184 D(x): 0.6712 D(G(z)): 0.4345 / 0.4581\n",
"[94/100][293/391] Loss_D: 2.9583 Loss_G: 3.7179 D(x): 0.7084 D(G(z)): 0.4746 / 0.3067\n",
"[94/100][294/391] Loss_D: 2.6365 Loss_G: 2.5843 D(x): 0.6666 D(G(z)): 0.4329 / 0.4565\n",
"[94/100][295/391] Loss_D: 2.5946 Loss_G: 2.4886 D(x): 0.7055 D(G(z)): 0.3892 / 0.4691\n",
"[94/100][296/391] Loss_D: 2.9852 Loss_G: 2.7177 D(x): 0.6074 D(G(z)): 0.3745 / 0.4329\n",
"[94/100][297/391] Loss_D: 2.8762 Loss_G: 2.6571 D(x): 0.6497 D(G(z)): 0.3879 / 0.4462\n",
"[94/100][298/391] Loss_D: 2.9914 Loss_G: 2.6261 D(x): 0.6937 D(G(z)): 0.5014 / 0.4370\n",
"[94/100][299/391] Loss_D: 2.9013 Loss_G: 2.5615 D(x): 0.7529 D(G(z)): 0.4990 / 0.4593\n",
"[94/100][300/391] Loss_D: 3.3826 Loss_G: 2.6501 D(x): 0.6666 D(G(z)): 0.5329 / 0.4392\n",
"[94/100][301/391] Loss_D: 3.6368 Loss_G: 2.5871 D(x): 0.6000 D(G(z)): 0.3819 / 0.4479\n",
"[94/100][302/391] Loss_D: 2.9292 Loss_G: 2.6668 D(x): 0.6050 D(G(z)): 0.4097 / 0.4572\n",
"[94/100][303/391] Loss_D: 2.7355 Loss_G: 3.2229 D(x): 0.7464 D(G(z)): 0.4720 / 0.3748\n",
"[94/100][304/391] Loss_D: 2.2404 Loss_G: 3.4610 D(x): 0.7184 D(G(z)): 0.3803 / 0.3475\n",
"[94/100][305/391] Loss_D: 3.7116 Loss_G: 2.9688 D(x): 0.6312 D(G(z)): 0.5986 / 0.3993\n",
"[94/100][306/391] Loss_D: 2.3939 Loss_G: 3.5879 D(x): 0.7105 D(G(z)): 0.3008 / 0.3354\n",
"[94/100][307/391] Loss_D: 2.9223 Loss_G: 3.2870 D(x): 0.7173 D(G(z)): 0.4749 / 0.3661\n",
"[94/100][308/391] Loss_D: 3.0333 Loss_G: 3.0635 D(x): 0.5664 D(G(z)): 0.3356 / 0.3972\n",
"[94/100][309/391] Loss_D: 2.9592 Loss_G: 3.5320 D(x): 0.5927 D(G(z)): 0.3909 / 0.3392\n",
"[94/100][310/391] Loss_D: 3.2610 Loss_G: 2.7162 D(x): 0.6103 D(G(z)): 0.4662 / 0.4367\n",
"[94/100][311/391] Loss_D: 2.9677 Loss_G: 2.3031 D(x): 0.6547 D(G(z)): 0.3920 / 0.5037\n",
"[94/100][312/391] Loss_D: 2.3751 Loss_G: 2.6191 D(x): 0.7505 D(G(z)): 0.3736 / 0.4723\n",
"[94/100][313/391] Loss_D: 2.8715 Loss_G: 2.1954 D(x): 0.6516 D(G(z)): 0.4444 / 0.5147\n",
"[94/100][314/391] Loss_D: 2.5465 Loss_G: 2.2467 D(x): 0.6520 D(G(z)): 0.4058 / 0.4959\n",
"[94/100][315/391] Loss_D: 2.5653 Loss_G: 2.5433 D(x): 0.7174 D(G(z)): 0.4010 / 0.4447\n",
"[94/100][316/391] Loss_D: 2.7971 Loss_G: 3.1305 D(x): 0.6778 D(G(z)): 0.4142 / 0.3810\n",
"[94/100][317/391] Loss_D: 2.7230 Loss_G: 2.1561 D(x): 0.7318 D(G(z)): 0.4284 / 0.5021\n",
"[94/100][318/391] Loss_D: 2.3415 Loss_G: 2.3230 D(x): 0.7112 D(G(z)): 0.3667 / 0.4912\n",
"[94/100][319/391] Loss_D: 2.8262 Loss_G: 2.8883 D(x): 0.6729 D(G(z)): 0.4553 / 0.4325\n",
"[94/100][320/391] Loss_D: 2.9922 Loss_G: 2.6789 D(x): 0.6602 D(G(z)): 0.4556 / 0.4531\n",
"[94/100][321/391] Loss_D: 3.3344 Loss_G: 3.5580 D(x): 0.5851 D(G(z)): 0.4426 / 0.3380\n",
"[94/100][322/391] Loss_D: 3.1174 Loss_G: 2.8029 D(x): 0.6788 D(G(z)): 0.5205 / 0.4083\n",
"[94/100][323/391] Loss_D: 2.3429 Loss_G: 1.9441 D(x): 0.7713 D(G(z)): 0.3380 / 0.5448\n",
"[94/100][324/391] Loss_D: 2.8424 Loss_G: 3.0453 D(x): 0.6508 D(G(z)): 0.4312 / 0.3866\n",
"[94/100][325/391] Loss_D: 3.1141 Loss_G: 2.8100 D(x): 0.6497 D(G(z)): 0.4256 / 0.4200\n",
"[94/100][326/391] Loss_D: 2.9635 Loss_G: 3.6069 D(x): 0.6371 D(G(z)): 0.4472 / 0.3224\n",
"[94/100][327/391] Loss_D: 2.8746 Loss_G: 2.3763 D(x): 0.6422 D(G(z)): 0.4307 / 0.4793\n",
"[94/100][328/391] Loss_D: 2.3445 Loss_G: 3.1030 D(x): 0.7075 D(G(z)): 0.3045 / 0.3847\n",
"[94/100][329/391] Loss_D: 2.9880 Loss_G: 1.8152 D(x): 0.6980 D(G(z)): 0.4760 / 0.5856\n",
"[94/100][330/391] Loss_D: 2.8875 Loss_G: 2.4468 D(x): 0.6428 D(G(z)): 0.4062 / 0.5050\n",
"[94/100][331/391] Loss_D: 3.5130 Loss_G: 2.4720 D(x): 0.6472 D(G(z)): 0.3779 / 0.4692\n",
"[94/100][332/391] Loss_D: 2.3470 Loss_G: 2.4145 D(x): 0.7108 D(G(z)): 0.3467 / 0.4736\n",
"[94/100][333/391] Loss_D: 3.0011 Loss_G: 2.6581 D(x): 0.7491 D(G(z)): 0.4936 / 0.4469\n",
"[94/100][334/391] Loss_D: 2.6137 Loss_G: 2.7017 D(x): 0.7416 D(G(z)): 0.4749 / 0.4250\n",
"[94/100][335/391] Loss_D: 2.7778 Loss_G: 4.3146 D(x): 0.7120 D(G(z)): 0.4591 / 0.2699\n",
"[94/100][336/391] Loss_D: 3.3628 Loss_G: 3.7359 D(x): 0.5976 D(G(z)): 0.4330 / 0.3243\n",
"[94/100][337/391] Loss_D: 2.7654 Loss_G: 3.2659 D(x): 0.6505 D(G(z)): 0.2741 / 0.3563\n",
"[94/100][338/391] Loss_D: 2.9153 Loss_G: 2.4756 D(x): 0.6226 D(G(z)): 0.3810 / 0.4577\n",
"[94/100][339/391] Loss_D: 3.1325 Loss_G: 2.6347 D(x): 0.6379 D(G(z)): 0.4827 / 0.4518\n",
"[94/100][340/391] Loss_D: 2.7242 Loss_G: 3.6195 D(x): 0.6771 D(G(z)): 0.4210 / 0.3391\n",
"[94/100][341/391] Loss_D: 2.6972 Loss_G: 3.1653 D(x): 0.7531 D(G(z)): 0.4521 / 0.3934\n",
"[94/100][342/391] Loss_D: 2.8314 Loss_G: 2.9312 D(x): 0.6446 D(G(z)): 0.3941 / 0.3992\n",
"[94/100][343/391] Loss_D: 2.7412 Loss_G: 2.8702 D(x): 0.6594 D(G(z)): 0.3893 / 0.4138\n",
"[94/100][344/391] Loss_D: 2.7420 Loss_G: 2.7557 D(x): 0.6516 D(G(z)): 0.4352 / 0.4204\n",
"[94/100][345/391] Loss_D: 2.7251 Loss_G: 2.6508 D(x): 0.7154 D(G(z)): 0.4382 / 0.4300\n",
"[94/100][346/391] Loss_D: 3.2497 Loss_G: 3.0687 D(x): 0.7440 D(G(z)): 0.5628 / 0.3942\n",
"[94/100][347/391] Loss_D: 3.2836 Loss_G: 3.2638 D(x): 0.6228 D(G(z)): 0.4751 / 0.3399\n",
"[94/100][348/391] Loss_D: 2.9531 Loss_G: 3.1775 D(x): 0.6221 D(G(z)): 0.4344 / 0.3873\n",
"[94/100][349/391] Loss_D: 3.0496 Loss_G: 4.0234 D(x): 0.5743 D(G(z)): 0.3458 / 0.2909\n",
"[94/100][350/391] Loss_D: 2.7407 Loss_G: 2.5942 D(x): 0.6159 D(G(z)): 0.3470 / 0.4677\n",
"[94/100][351/391] Loss_D: 2.7135 Loss_G: 2.9476 D(x): 0.7195 D(G(z)): 0.4304 / 0.4105\n",
"[94/100][352/391] Loss_D: 2.6339 Loss_G: 2.5201 D(x): 0.6636 D(G(z)): 0.3997 / 0.4485\n",
"[94/100][353/391] Loss_D: 2.6596 Loss_G: 3.5026 D(x): 0.7570 D(G(z)): 0.4417 / 0.3452\n",
"[94/100][354/391] Loss_D: 2.8646 Loss_G: 3.1572 D(x): 0.6516 D(G(z)): 0.4689 / 0.3941\n",
"[94/100][355/391] Loss_D: 2.6614 Loss_G: 2.6802 D(x): 0.6428 D(G(z)): 0.3821 / 0.4357\n",
"[94/100][356/391] Loss_D: 2.8896 Loss_G: 2.0655 D(x): 0.5676 D(G(z)): 0.3633 / 0.5338\n",
"[94/100][357/391] Loss_D: 2.9123 Loss_G: 2.7055 D(x): 0.6850 D(G(z)): 0.4589 / 0.4197\n",
"[94/100][358/391] Loss_D: 2.9369 Loss_G: 2.2514 D(x): 0.6382 D(G(z)): 0.4705 / 0.4991\n",
"[94/100][359/391] Loss_D: 2.4276 Loss_G: 2.9192 D(x): 0.7464 D(G(z)): 0.4138 / 0.4007\n",
"[94/100][360/391] Loss_D: 2.8604 Loss_G: 2.9440 D(x): 0.6396 D(G(z)): 0.4099 / 0.4162\n",
"[94/100][361/391] Loss_D: 3.4497 Loss_G: 2.9190 D(x): 0.6430 D(G(z)): 0.3728 / 0.4091\n",
"[94/100][362/391] Loss_D: 2.6182 Loss_G: 2.3421 D(x): 0.7289 D(G(z)): 0.4204 / 0.4825\n",
"[94/100][363/391] Loss_D: 2.7704 Loss_G: 1.9602 D(x): 0.6874 D(G(z)): 0.4405 / 0.5434\n",
"[94/100][364/391] Loss_D: 3.3195 Loss_G: 2.6664 D(x): 0.6494 D(G(z)): 0.5240 / 0.4395\n",
"[94/100][365/391] Loss_D: 2.7974 Loss_G: 4.7304 D(x): 0.6776 D(G(z)): 0.4129 / 0.2330\n",
"[94/100][366/391] Loss_D: 2.5724 Loss_G: 3.2900 D(x): 0.6085 D(G(z)): 0.3061 / 0.3717\n",
"[94/100][367/391] Loss_D: 2.5301 Loss_G: 2.7304 D(x): 0.7694 D(G(z)): 0.3800 / 0.4272\n",
"[94/100][368/391] Loss_D: 2.6893 Loss_G: 2.2295 D(x): 0.6745 D(G(z)): 0.3930 / 0.5001\n",
"[94/100][369/391] Loss_D: 2.2695 Loss_G: 2.6142 D(x): 0.7185 D(G(z)): 0.3365 / 0.4515\n",
"[94/100][370/391] Loss_D: 2.9638 Loss_G: 1.6773 D(x): 0.6883 D(G(z)): 0.4938 / 0.5774\n",
"[94/100][371/391] Loss_D: 3.2594 Loss_G: 2.7821 D(x): 0.6527 D(G(z)): 0.4902 / 0.4273\n",
"[94/100][372/391] Loss_D: 3.1860 Loss_G: 2.7133 D(x): 0.5632 D(G(z)): 0.4273 / 0.4369\n",
"[94/100][373/391] Loss_D: 3.2444 Loss_G: 2.6594 D(x): 0.6084 D(G(z)): 0.4558 / 0.4245\n",
"[94/100][374/391] Loss_D: 2.2109 Loss_G: 2.9306 D(x): 0.7677 D(G(z)): 0.3565 / 0.4106\n",
"[94/100][375/391] Loss_D: 2.7673 Loss_G: 3.8774 D(x): 0.6757 D(G(z)): 0.3600 / 0.3044\n",
"[94/100][376/391] Loss_D: 3.2510 Loss_G: 3.1025 D(x): 0.6727 D(G(z)): 0.5211 / 0.3906\n",
"[94/100][377/391] Loss_D: 3.2320 Loss_G: 2.3716 D(x): 0.6425 D(G(z)): 0.4829 / 0.4631\n",
"[94/100][378/391] Loss_D: 2.2555 Loss_G: 2.5378 D(x): 0.7164 D(G(z)): 0.3516 / 0.4715\n",
"[94/100][379/391] Loss_D: 3.1004 Loss_G: 2.9880 D(x): 0.5629 D(G(z)): 0.4221 / 0.4002\n",
"[94/100][380/391] Loss_D: 2.7290 Loss_G: 3.1910 D(x): 0.7036 D(G(z)): 0.4350 / 0.3837\n",
"[94/100][381/391] Loss_D: 2.4885 Loss_G: 3.0060 D(x): 0.8065 D(G(z)): 0.4553 / 0.4094\n",
"[94/100][382/391] Loss_D: 2.9369 Loss_G: 2.6590 D(x): 0.7173 D(G(z)): 0.5116 / 0.4700\n",
"[94/100][383/391] Loss_D: 2.6171 Loss_G: 3.0974 D(x): 0.6766 D(G(z)): 0.3612 / 0.3862\n",
"[94/100][384/391] Loss_D: 2.4092 Loss_G: 2.8238 D(x): 0.6609 D(G(z)): 0.3651 / 0.4278\n",
"[94/100][385/391] Loss_D: 2.5036 Loss_G: 2.8496 D(x): 0.6547 D(G(z)): 0.3322 / 0.4078\n",
"[94/100][386/391] Loss_D: 3.5799 Loss_G: 2.6734 D(x): 0.6677 D(G(z)): 0.5531 / 0.4394\n",
"[94/100][387/391] Loss_D: 3.1201 Loss_G: 2.7423 D(x): 0.6708 D(G(z)): 0.4964 / 0.4430\n",
"[94/100][388/391] Loss_D: 2.7917 Loss_G: 2.6678 D(x): 0.6250 D(G(z)): 0.3520 / 0.4628\n",
"[94/100][389/391] Loss_D: 2.9513 Loss_G: 2.7557 D(x): 0.6042 D(G(z)): 0.3810 / 0.4470\n",
"[94/100][390/391] Loss_D: 2.7524 Loss_G: 2.6905 D(x): 0.7033 D(G(z)): 0.4600 / 0.4520\n",
"[94/100][391/391] Loss_D: 3.6457 Loss_G: 2.9977 D(x): 0.6446 D(G(z)): 0.3955 / 0.3832\n",
"[95/100][1/391] Loss_D: 3.4635 Loss_G: 3.3887 D(x): 0.6727 D(G(z)): 0.4406 / 0.3565\n",
"[95/100][2/391] Loss_D: 2.7511 Loss_G: 2.5762 D(x): 0.6476 D(G(z)): 0.4106 / 0.4559\n",
"[95/100][3/391] Loss_D: 2.4480 Loss_G: 2.6591 D(x): 0.7200 D(G(z)): 0.3496 / 0.4435\n",
"[95/100][4/391] Loss_D: 2.5494 Loss_G: 3.0519 D(x): 0.6753 D(G(z)): 0.4121 / 0.4119\n",
"[95/100][5/391] Loss_D: 2.7651 Loss_G: 2.6680 D(x): 0.6599 D(G(z)): 0.4308 / 0.4402\n",
"[95/100][6/391] Loss_D: 3.3813 Loss_G: 3.2788 D(x): 0.5680 D(G(z)): 0.4485 / 0.3476\n",
"[95/100][7/391] Loss_D: 2.8880 Loss_G: 2.9078 D(x): 0.7089 D(G(z)): 0.4008 / 0.4081\n",
"[95/100][8/391] Loss_D: 2.0584 Loss_G: 2.4101 D(x): 0.7728 D(G(z)): 0.3797 / 0.4841\n",
"[95/100][9/391] Loss_D: 2.3753 Loss_G: 3.1920 D(x): 0.7681 D(G(z)): 0.4098 / 0.3792\n",
"[95/100][10/391] Loss_D: 3.4095 Loss_G: 2.4563 D(x): 0.6085 D(G(z)): 0.5240 / 0.4760\n",
"[95/100][11/391] Loss_D: 3.2866 Loss_G: 3.1951 D(x): 0.6344 D(G(z)): 0.5378 / 0.3852\n",
"[95/100][12/391] Loss_D: 3.2903 Loss_G: 3.9162 D(x): 0.6395 D(G(z)): 0.4957 / 0.3010\n",
"[95/100][13/391] Loss_D: 3.0165 Loss_G: 2.4338 D(x): 0.5772 D(G(z)): 0.3913 / 0.4740\n",
"[95/100][14/391] Loss_D: 2.7476 Loss_G: 2.4558 D(x): 0.5836 D(G(z)): 0.3266 / 0.4556\n",
"[95/100][15/391] Loss_D: 2.9335 Loss_G: 3.5037 D(x): 0.6797 D(G(z)): 0.5119 / 0.3368\n",
"[95/100][16/391] Loss_D: 3.1036 Loss_G: 3.1682 D(x): 0.5943 D(G(z)): 0.3613 / 0.3685\n",
"[95/100][17/391] Loss_D: 2.9571 Loss_G: 2.3826 D(x): 0.6561 D(G(z)): 0.4694 / 0.4701\n",
"[95/100][18/391] Loss_D: 2.0949 Loss_G: 2.9867 D(x): 0.7622 D(G(z)): 0.3321 / 0.3975\n",
"[95/100][19/391] Loss_D: 2.8480 Loss_G: 3.1352 D(x): 0.6953 D(G(z)): 0.5054 / 0.3872\n",
"[95/100][20/391] Loss_D: 2.7189 Loss_G: 3.6246 D(x): 0.6508 D(G(z)): 0.3640 / 0.3303\n",
"[95/100][21/391] Loss_D: 2.8869 Loss_G: 2.3030 D(x): 0.6373 D(G(z)): 0.4736 / 0.5026\n",
"[95/100][22/391] Loss_D: 2.7961 Loss_G: 3.1478 D(x): 0.6669 D(G(z)): 0.4201 / 0.3922\n",
"[95/100][23/391] Loss_D: 2.5650 Loss_G: 1.5607 D(x): 0.6801 D(G(z)): 0.4081 / 0.6168\n",
"[95/100][24/391] Loss_D: 2.5146 Loss_G: 2.1550 D(x): 0.7440 D(G(z)): 0.4203 / 0.5174\n",
"[95/100][25/391] Loss_D: 2.5399 Loss_G: 2.2327 D(x): 0.7242 D(G(z)): 0.3471 / 0.5122\n",
"[95/100][26/391] Loss_D: 2.5260 Loss_G: 3.3648 D(x): 0.7732 D(G(z)): 0.3836 / 0.3614\n",
"[95/100][27/391] Loss_D: 2.9646 Loss_G: 2.9253 D(x): 0.6593 D(G(z)): 0.4002 / 0.4175\n",
"[95/100][28/391] Loss_D: 2.9606 Loss_G: 2.6476 D(x): 0.6093 D(G(z)): 0.4135 / 0.4399\n",
"[95/100][29/391] Loss_D: 2.8873 Loss_G: 2.1840 D(x): 0.7512 D(G(z)): 0.5345 / 0.5173\n",
"[95/100][30/391] Loss_D: 2.5187 Loss_G: 3.1373 D(x): 0.6811 D(G(z)): 0.4032 / 0.3822\n",
"[95/100][31/391] Loss_D: 3.7426 Loss_G: 3.1547 D(x): 0.7304 D(G(z)): 0.3396 / 0.4077\n",
"[95/100][32/391] Loss_D: 3.4254 Loss_G: 2.7233 D(x): 0.6716 D(G(z)): 0.5563 / 0.4435\n",
"[95/100][33/391] Loss_D: 3.5238 Loss_G: 2.9126 D(x): 0.5370 D(G(z)): 0.4313 / 0.4452\n",
"[95/100][34/391] Loss_D: 3.0064 Loss_G: 2.1407 D(x): 0.6411 D(G(z)): 0.4756 / 0.5157\n",
"[95/100][35/391] Loss_D: 2.5319 Loss_G: 3.5955 D(x): 0.6336 D(G(z)): 0.2995 / 0.3262\n",
"[95/100][36/391] Loss_D: 2.5077 Loss_G: 3.0059 D(x): 0.7194 D(G(z)): 0.3397 / 0.4110\n",
"[95/100][37/391] Loss_D: 3.0283 Loss_G: 2.9668 D(x): 0.6365 D(G(z)): 0.4527 / 0.4072\n",
"[95/100][38/391] Loss_D: 2.5939 Loss_G: 3.3400 D(x): 0.7181 D(G(z)): 0.4550 / 0.3732\n",
"[95/100][39/391] Loss_D: 2.9140 Loss_G: 2.9809 D(x): 0.7208 D(G(z)): 0.4724 / 0.4057\n",
"[95/100][40/391] Loss_D: 2.8054 Loss_G: 2.5950 D(x): 0.7339 D(G(z)): 0.4300 / 0.4702\n",
"[95/100][41/391] Loss_D: 3.6178 Loss_G: 2.5380 D(x): 0.5493 D(G(z)): 0.4746 / 0.4649\n",
"[95/100][42/391] Loss_D: 2.3823 Loss_G: 2.4862 D(x): 0.7432 D(G(z)): 0.3560 / 0.4739\n",
"[95/100][43/391] Loss_D: 2.7613 Loss_G: 3.3265 D(x): 0.6776 D(G(z)): 0.3779 / 0.3532\n",
"[95/100][44/391] Loss_D: 2.9990 Loss_G: 1.9037 D(x): 0.6127 D(G(z)): 0.4608 / 0.5532\n",
"[95/100][45/391] Loss_D: 2.3853 Loss_G: 2.1509 D(x): 0.7155 D(G(z)): 0.3098 / 0.5033\n",
"[95/100][46/391] Loss_D: 2.5129 Loss_G: 2.5679 D(x): 0.6732 D(G(z)): 0.2940 / 0.4493\n",
"[95/100][47/391] Loss_D: 3.1931 Loss_G: 2.4881 D(x): 0.6493 D(G(z)): 0.5399 / 0.4668\n",
"[95/100][48/391] Loss_D: 3.2175 Loss_G: 2.5806 D(x): 0.6260 D(G(z)): 0.5290 / 0.4591\n",
"[95/100][49/391] Loss_D: 2.8521 Loss_G: 3.0137 D(x): 0.6824 D(G(z)): 0.4724 / 0.3916\n",
"[95/100][50/391] Loss_D: 3.2725 Loss_G: 3.4148 D(x): 0.6110 D(G(z)): 0.4913 / 0.3520\n",
"[95/100][51/391] Loss_D: 2.8104 Loss_G: 4.0553 D(x): 0.6501 D(G(z)): 0.3861 / 0.2941\n",
"[95/100][52/391] Loss_D: 2.3105 Loss_G: 2.2478 D(x): 0.6808 D(G(z)): 0.2981 / 0.5122\n",
"[95/100][53/391] Loss_D: 2.5585 Loss_G: 3.3400 D(x): 0.8083 D(G(z)): 0.4559 / 0.3551\n",
"[95/100][54/391] Loss_D: 2.6776 Loss_G: 3.4990 D(x): 0.6698 D(G(z)): 0.3811 / 0.3394\n",
"[95/100][55/391] Loss_D: 2.7158 Loss_G: 2.3943 D(x): 0.7219 D(G(z)): 0.4631 / 0.4674\n",
"[95/100][56/391] Loss_D: 2.4881 Loss_G: 2.6432 D(x): 0.6662 D(G(z)): 0.2491 / 0.4210\n",
"[95/100][57/391] Loss_D: 2.9309 Loss_G: 2.7092 D(x): 0.6625 D(G(z)): 0.4415 / 0.4504\n",
"[95/100][58/391] Loss_D: 3.0980 Loss_G: 3.3482 D(x): 0.6338 D(G(z)): 0.4610 / 0.3627\n",
"[95/100][59/391] Loss_D: 3.2117 Loss_G: 2.4051 D(x): 0.6451 D(G(z)): 0.4828 / 0.4835\n",
"[95/100][60/391] Loss_D: 3.0426 Loss_G: 2.8201 D(x): 0.6467 D(G(z)): 0.4486 / 0.4247\n",
"[95/100][61/391] Loss_D: 3.6422 Loss_G: 2.6762 D(x): 0.6668 D(G(z)): 0.3433 / 0.4438\n",
"[95/100][62/391] Loss_D: 2.7337 Loss_G: 3.5412 D(x): 0.6947 D(G(z)): 0.4559 / 0.3368\n",
"[95/100][63/391] Loss_D: 2.9930 Loss_G: 3.4292 D(x): 0.5980 D(G(z)): 0.3646 / 0.3602\n",
"[95/100][64/391] Loss_D: 2.6039 Loss_G: 3.5762 D(x): 0.7087 D(G(z)): 0.4723 / 0.3386\n",
"[95/100][65/391] Loss_D: 2.6019 Loss_G: 2.5268 D(x): 0.7204 D(G(z)): 0.4122 / 0.4619\n",
"[95/100][66/391] Loss_D: 2.4369 Loss_G: 1.9893 D(x): 0.7336 D(G(z)): 0.3889 / 0.5356\n",
"[95/100][67/391] Loss_D: 3.0571 Loss_G: 3.5544 D(x): 0.6296 D(G(z)): 0.3961 / 0.3324\n",
"[95/100][68/391] Loss_D: 2.2774 Loss_G: 2.9286 D(x): 0.7027 D(G(z)): 0.3383 / 0.4157\n",
"[95/100][69/391] Loss_D: 2.8633 Loss_G: 2.4414 D(x): 0.7047 D(G(z)): 0.4744 / 0.4809\n",
"[95/100][70/391] Loss_D: 2.6361 Loss_G: 2.0434 D(x): 0.7063 D(G(z)): 0.3750 / 0.5446\n",
"[95/100][71/391] Loss_D: 2.7805 Loss_G: 2.6172 D(x): 0.6963 D(G(z)): 0.4429 / 0.4575\n",
"[95/100][72/391] Loss_D: 3.0479 Loss_G: 3.0797 D(x): 0.6339 D(G(z)): 0.4737 / 0.3844\n",
"[95/100][73/391] Loss_D: 3.3297 Loss_G: 2.9186 D(x): 0.5435 D(G(z)): 0.4444 / 0.4084\n",
"[95/100][74/391] Loss_D: 3.1065 Loss_G: 2.4832 D(x): 0.6654 D(G(z)): 0.5071 / 0.4639\n",
"[95/100][75/391] Loss_D: 2.9246 Loss_G: 3.3147 D(x): 0.6079 D(G(z)): 0.4150 / 0.3591\n",
"[95/100][76/391] Loss_D: 2.4827 Loss_G: 2.5457 D(x): 0.7179 D(G(z)): 0.3581 / 0.4433\n",
"[95/100][77/391] Loss_D: 3.1074 Loss_G: 2.4551 D(x): 0.6266 D(G(z)): 0.4039 / 0.4553\n",
"[95/100][78/391] Loss_D: 3.1088 Loss_G: 3.8776 D(x): 0.6284 D(G(z)): 0.4873 / 0.2982\n",
"[95/100][79/391] Loss_D: 2.9722 Loss_G: 2.3188 D(x): 0.6554 D(G(z)): 0.5149 / 0.4886\n",
"[95/100][80/391] Loss_D: 2.8610 Loss_G: 2.0662 D(x): 0.7534 D(G(z)): 0.4638 / 0.5262\n",
"[95/100][81/391] Loss_D: 2.5247 Loss_G: 3.5662 D(x): 0.6889 D(G(z)): 0.3721 / 0.3364\n",
"[95/100][82/391] Loss_D: 3.1801 Loss_G: 3.7877 D(x): 0.7536 D(G(z)): 0.5456 / 0.3187\n",
"[95/100][83/391] Loss_D: 2.7619 Loss_G: 2.8053 D(x): 0.6564 D(G(z)): 0.3811 / 0.4150\n",
"[95/100][84/391] Loss_D: 2.6485 Loss_G: 3.4287 D(x): 0.6057 D(G(z)): 0.3466 / 0.3568\n",
"[95/100][85/391] Loss_D: 2.9220 Loss_G: 3.3436 D(x): 0.6132 D(G(z)): 0.3929 / 0.3619\n",
"[95/100][86/391] Loss_D: 2.8956 Loss_G: 2.3461 D(x): 0.6428 D(G(z)): 0.4064 / 0.4632\n",
"[95/100][87/391] Loss_D: 3.0120 Loss_G: 3.7128 D(x): 0.6836 D(G(z)): 0.4296 / 0.3143\n",
"[95/100][88/391] Loss_D: 2.8068 Loss_G: 2.2864 D(x): 0.5973 D(G(z)): 0.3427 / 0.4967\n",
"[95/100][89/391] Loss_D: 3.4709 Loss_G: 2.7644 D(x): 0.6306 D(G(z)): 0.5547 / 0.4383\n",
"[95/100][90/391] Loss_D: 2.5645 Loss_G: 2.5097 D(x): 0.7226 D(G(z)): 0.3974 / 0.4592\n",
"[95/100][91/391] Loss_D: 3.7215 Loss_G: 2.5673 D(x): 0.7066 D(G(z)): 0.5542 / 0.4602\n",
"[95/100][92/391] Loss_D: 3.1720 Loss_G: 1.8851 D(x): 0.4992 D(G(z)): 0.3150 / 0.5471\n",
"[95/100][93/391] Loss_D: 2.8541 Loss_G: 4.3071 D(x): 0.7079 D(G(z)): 0.4693 / 0.2583\n",
"[95/100][94/391] Loss_D: 2.6357 Loss_G: 2.3853 D(x): 0.6799 D(G(z)): 0.4139 / 0.4699\n",
"[95/100][95/391] Loss_D: 2.7220 Loss_G: 2.8941 D(x): 0.6960 D(G(z)): 0.4085 / 0.4084\n",
"[95/100][96/391] Loss_D: 2.6802 Loss_G: 3.4145 D(x): 0.7178 D(G(z)): 0.4335 / 0.3369\n",
"[95/100][97/391] Loss_D: 2.7561 Loss_G: 2.0615 D(x): 0.7423 D(G(z)): 0.4149 / 0.5247\n",
"[95/100][98/391] Loss_D: 3.2744 Loss_G: 2.6602 D(x): 0.5598 D(G(z)): 0.4071 / 0.4541\n",
"[95/100][99/391] Loss_D: 3.4573 Loss_G: 3.3870 D(x): 0.5713 D(G(z)): 0.4873 / 0.3660\n",
"[95/100][100/391] Loss_D: 3.1338 Loss_G: 2.2825 D(x): 0.6695 D(G(z)): 0.5184 / 0.5176\n",
"[95/100][101/391] Loss_D: 3.2012 Loss_G: 2.2191 D(x): 0.6558 D(G(z)): 0.5237 / 0.5062\n",
"[95/100][102/391] Loss_D: 3.3746 Loss_G: 3.0351 D(x): 0.6635 D(G(z)): 0.5265 / 0.3942\n",
"[95/100][103/391] Loss_D: 2.4832 Loss_G: 4.1126 D(x): 0.7507 D(G(z)): 0.4064 / 0.2890\n",
"[95/100][104/391] Loss_D: 2.5144 Loss_G: 3.9588 D(x): 0.7187 D(G(z)): 0.3970 / 0.3008\n",
"[95/100][105/391] Loss_D: 2.9915 Loss_G: 2.6170 D(x): 0.5779 D(G(z)): 0.2571 / 0.4461\n",
"[95/100][106/391] Loss_D: 2.9515 Loss_G: 2.2701 D(x): 0.6326 D(G(z)): 0.4702 / 0.5024\n",
"[95/100][107/391] Loss_D: 3.1664 Loss_G: 2.3908 D(x): 0.6420 D(G(z)): 0.4615 / 0.4694\n",
"[95/100][108/391] Loss_D: 3.1497 Loss_G: 3.0644 D(x): 0.6644 D(G(z)): 0.4993 / 0.3971\n",
"[95/100][109/391] Loss_D: 3.1007 Loss_G: 3.0887 D(x): 0.6006 D(G(z)): 0.4758 / 0.3850\n",
"[95/100][110/391] Loss_D: 3.5175 Loss_G: 3.0853 D(x): 0.6195 D(G(z)): 0.5530 / 0.4051\n",
"[95/100][111/391] Loss_D: 2.5121 Loss_G: 3.9931 D(x): 0.6822 D(G(z)): 0.3637 / 0.2996\n",
"[95/100][112/391] Loss_D: 2.6607 Loss_G: 1.9857 D(x): 0.6936 D(G(z)): 0.4252 / 0.5720\n",
"[95/100][113/391] Loss_D: 3.2004 Loss_G: 2.3780 D(x): 0.6247 D(G(z)): 0.4614 / 0.4768\n",
"[95/100][114/391] Loss_D: 2.2932 Loss_G: 3.5019 D(x): 0.7012 D(G(z)): 0.3624 / 0.3274\n",
"[95/100][115/391] Loss_D: 2.8958 Loss_G: 4.1124 D(x): 0.5883 D(G(z)): 0.3452 / 0.2773\n",
"[95/100][116/391] Loss_D: 2.7609 Loss_G: 2.2116 D(x): 0.6543 D(G(z)): 0.4090 / 0.4937\n",
"[95/100][117/391] Loss_D: 2.8060 Loss_G: 3.3180 D(x): 0.6776 D(G(z)): 0.3863 / 0.3692\n",
"[95/100][118/391] Loss_D: 2.1512 Loss_G: 2.9471 D(x): 0.7633 D(G(z)): 0.3110 / 0.4009\n",
"[95/100][119/391] Loss_D: 3.0376 Loss_G: 2.7586 D(x): 0.6803 D(G(z)): 0.4852 / 0.4325\n",
"[95/100][120/391] Loss_D: 2.6196 Loss_G: 2.7368 D(x): 0.7666 D(G(z)): 0.4839 / 0.4397\n",
"[95/100][121/391] Loss_D: 3.6203 Loss_G: 2.8187 D(x): 0.5733 D(G(z)): 0.3871 / 0.4266\n",
"[95/100][122/391] Loss_D: 2.9484 Loss_G: 3.2545 D(x): 0.7157 D(G(z)): 0.4752 / 0.3751\n",
"[95/100][123/391] Loss_D: 3.7168 Loss_G: 3.5330 D(x): 0.6084 D(G(z)): 0.5631 / 0.3475\n",
"[95/100][124/391] Loss_D: 3.8787 Loss_G: 2.8150 D(x): 0.5659 D(G(z)): 0.5551 / 0.4183\n",
"[95/100][125/391] Loss_D: 2.8852 Loss_G: 3.0271 D(x): 0.7017 D(G(z)): 0.4278 / 0.4011\n",
"[95/100][126/391] Loss_D: 3.0889 Loss_G: 2.9832 D(x): 0.5659 D(G(z)): 0.4023 / 0.3980\n",
"[95/100][127/391] Loss_D: 3.1110 Loss_G: 2.8480 D(x): 0.5742 D(G(z)): 0.3574 / 0.4138\n",
"[95/100][128/391] Loss_D: 2.8528 Loss_G: 3.4125 D(x): 0.7034 D(G(z)): 0.5095 / 0.3602\n",
"[95/100][129/391] Loss_D: 2.7053 Loss_G: 3.0208 D(x): 0.6634 D(G(z)): 0.4128 / 0.4049\n",
"[95/100][130/391] Loss_D: 2.7662 Loss_G: 2.9977 D(x): 0.7212 D(G(z)): 0.4542 / 0.4085\n",
"[95/100][131/391] Loss_D: 2.8958 Loss_G: 3.0350 D(x): 0.6250 D(G(z)): 0.4347 / 0.4013\n",
"[95/100][132/391] Loss_D: 2.6251 Loss_G: 3.1482 D(x): 0.7364 D(G(z)): 0.4199 / 0.3763\n",
"[95/100][133/391] Loss_D: 2.8832 Loss_G: 2.2485 D(x): 0.6150 D(G(z)): 0.4089 / 0.4980\n",
"[95/100][134/391] Loss_D: 2.9560 Loss_G: 2.2044 D(x): 0.6476 D(G(z)): 0.4691 / 0.5076\n",
"[95/100][135/391] Loss_D: 2.5572 Loss_G: 2.4510 D(x): 0.6838 D(G(z)): 0.3929 / 0.4812\n",
"[95/100][136/391] Loss_D: 2.8199 Loss_G: 3.3312 D(x): 0.7033 D(G(z)): 0.4390 / 0.3678\n",
"[95/100][137/391] Loss_D: 2.8889 Loss_G: 4.0723 D(x): 0.6845 D(G(z)): 0.4424 / 0.2738\n",
"[95/100][138/391] Loss_D: 2.2650 Loss_G: 2.9764 D(x): 0.7158 D(G(z)): 0.3395 / 0.4092\n",
"[95/100][139/391] Loss_D: 2.8624 Loss_G: 3.5854 D(x): 0.7044 D(G(z)): 0.4493 / 0.3463\n",
"[95/100][140/391] Loss_D: 3.2620 Loss_G: 2.9915 D(x): 0.5920 D(G(z)): 0.4602 / 0.4036\n",
"[95/100][141/391] Loss_D: 3.3437 Loss_G: 4.4769 D(x): 0.6913 D(G(z)): 0.5602 / 0.2549\n",
"[95/100][142/391] Loss_D: 2.8081 Loss_G: 2.9950 D(x): 0.6171 D(G(z)): 0.3840 / 0.3961\n",
"[95/100][143/391] Loss_D: 2.4753 Loss_G: 3.1252 D(x): 0.7196 D(G(z)): 0.3536 / 0.3759\n",
"[95/100][144/391] Loss_D: 2.8555 Loss_G: 3.5648 D(x): 0.6055 D(G(z)): 0.3747 / 0.3343\n",
"[95/100][145/391] Loss_D: 2.7251 Loss_G: 2.7327 D(x): 0.7954 D(G(z)): 0.4709 / 0.4422\n",
"[95/100][146/391] Loss_D: 2.6850 Loss_G: 2.9725 D(x): 0.6270 D(G(z)): 0.2926 / 0.3850\n",
"[95/100][147/391] Loss_D: 3.4113 Loss_G: 3.0424 D(x): 0.5593 D(G(z)): 0.4419 / 0.3946\n",
"[95/100][148/391] Loss_D: 2.8168 Loss_G: 2.7001 D(x): 0.6290 D(G(z)): 0.4457 / 0.4302\n",
"[95/100][149/391] Loss_D: 2.7447 Loss_G: 3.0547 D(x): 0.6878 D(G(z)): 0.4107 / 0.3870\n",
"[95/100][150/391] Loss_D: 2.8563 Loss_G: 3.3134 D(x): 0.6979 D(G(z)): 0.4462 / 0.3669\n",
"[95/100][151/391] Loss_D: 3.5924 Loss_G: 2.9150 D(x): 0.6238 D(G(z)): 0.4980 / 0.4103\n",
"[95/100][152/391] Loss_D: 2.9425 Loss_G: 2.8423 D(x): 0.7481 D(G(z)): 0.5336 / 0.4118\n",
"[95/100][153/391] Loss_D: 3.2661 Loss_G: 2.7461 D(x): 0.6114 D(G(z)): 0.4591 / 0.4240\n",
"[95/100][154/391] Loss_D: 2.3361 Loss_G: 3.1492 D(x): 0.7567 D(G(z)): 0.4266 / 0.3971\n",
"[95/100][155/391] Loss_D: 2.5937 Loss_G: 2.7501 D(x): 0.7087 D(G(z)): 0.3309 / 0.4349\n",
"[95/100][156/391] Loss_D: 2.4363 Loss_G: 2.9135 D(x): 0.6771 D(G(z)): 0.2862 / 0.4050\n",
"[95/100][157/391] Loss_D: 3.0085 Loss_G: 3.4648 D(x): 0.6443 D(G(z)): 0.4314 / 0.3398\n",
"[95/100][158/391] Loss_D: 2.3900 Loss_G: 2.4554 D(x): 0.6832 D(G(z)): 0.3234 / 0.4789\n",
"[95/100][159/391] Loss_D: 2.8215 Loss_G: 2.5118 D(x): 0.6800 D(G(z)): 0.4087 / 0.4764\n",
"[95/100][160/391] Loss_D: 2.6522 Loss_G: 2.7831 D(x): 0.7730 D(G(z)): 0.4823 / 0.4296\n",
"[95/100][161/391] Loss_D: 2.9872 Loss_G: 2.6125 D(x): 0.6059 D(G(z)): 0.3800 / 0.4558\n",
"[95/100][162/391] Loss_D: 2.7355 Loss_G: 2.7097 D(x): 0.7040 D(G(z)): 0.4561 / 0.4334\n",
"[95/100][163/391] Loss_D: 2.8182 Loss_G: 3.5457 D(x): 0.6406 D(G(z)): 0.4006 / 0.3283\n",
"[95/100][164/391] Loss_D: 2.5493 Loss_G: 3.1880 D(x): 0.7184 D(G(z)): 0.4403 / 0.3696\n",
"[95/100][165/391] Loss_D: 2.9515 Loss_G: 2.9367 D(x): 0.6032 D(G(z)): 0.4030 / 0.3989\n",
"[95/100][166/391] Loss_D: 2.8284 Loss_G: 2.8609 D(x): 0.6119 D(G(z)): 0.3865 / 0.4096\n",
"[95/100][167/391] Loss_D: 2.8148 Loss_G: 2.1974 D(x): 0.7777 D(G(z)): 0.4604 / 0.4987\n",
"[95/100][168/391] Loss_D: 2.1665 Loss_G: 2.3932 D(x): 0.7505 D(G(z)): 0.2742 / 0.4711\n",
"[95/100][169/391] Loss_D: 2.8595 Loss_G: 2.7686 D(x): 0.6542 D(G(z)): 0.4544 / 0.4393\n",
"[95/100][170/391] Loss_D: 3.0883 Loss_G: 2.6788 D(x): 0.6280 D(G(z)): 0.4266 / 0.4360\n",
"[95/100][171/391] Loss_D: 2.6202 Loss_G: 3.9743 D(x): 0.7332 D(G(z)): 0.4285 / 0.3048\n",
"[95/100][172/391] Loss_D: 2.9770 Loss_G: 2.4779 D(x): 0.6663 D(G(z)): 0.4606 / 0.4661\n",
"[95/100][173/391] Loss_D: 2.8085 Loss_G: 2.5508 D(x): 0.7051 D(G(z)): 0.4797 / 0.4572\n",
"[95/100][174/391] Loss_D: 2.5621 Loss_G: 2.3987 D(x): 0.7178 D(G(z)): 0.4457 / 0.4943\n",
"[95/100][175/391] Loss_D: 2.8890 Loss_G: 2.5496 D(x): 0.6454 D(G(z)): 0.4230 / 0.4554\n",
"[95/100][176/391] Loss_D: 3.1830 Loss_G: 3.5318 D(x): 0.5764 D(G(z)): 0.3831 / 0.3399\n",
"[95/100][177/391] Loss_D: 2.7950 Loss_G: 3.4330 D(x): 0.6096 D(G(z)): 0.3027 / 0.3430\n",
"[95/100][178/391] Loss_D: 2.5246 Loss_G: 2.3661 D(x): 0.7037 D(G(z)): 0.3816 / 0.4797\n",
"[95/100][179/391] Loss_D: 3.0516 Loss_G: 2.4909 D(x): 0.6833 D(G(z)): 0.4775 / 0.4703\n",
"[95/100][180/391] Loss_D: 3.2967 Loss_G: 2.8516 D(x): 0.5843 D(G(z)): 0.4572 / 0.4318\n",
"[95/100][181/391] Loss_D: 3.9953 Loss_G: 2.2078 D(x): 0.6918 D(G(z)): 0.6216 / 0.5052\n",
"[95/100][182/391] Loss_D: 2.8293 Loss_G: 2.8752 D(x): 0.7058 D(G(z)): 0.4910 / 0.4158\n",
"[95/100][183/391] Loss_D: 2.5378 Loss_G: 3.6737 D(x): 0.6654 D(G(z)): 0.3897 / 0.3376\n",
"[95/100][184/391] Loss_D: 2.5340 Loss_G: 2.8537 D(x): 0.6873 D(G(z)): 0.3686 / 0.4121\n",
"[95/100][185/391] Loss_D: 2.8071 Loss_G: 3.1616 D(x): 0.7225 D(G(z)): 0.4427 / 0.3859\n",
"[95/100][186/391] Loss_D: 2.5760 Loss_G: 2.2742 D(x): 0.6423 D(G(z)): 0.3536 / 0.5107\n",
"[95/100][187/391] Loss_D: 2.4349 Loss_G: 3.5092 D(x): 0.7201 D(G(z)): 0.3410 / 0.3347\n",
"[95/100][188/391] Loss_D: 2.6962 Loss_G: 2.8349 D(x): 0.6808 D(G(z)): 0.4690 / 0.4215\n",
"[95/100][189/391] Loss_D: 2.9150 Loss_G: 3.1183 D(x): 0.6699 D(G(z)): 0.4645 / 0.3922\n",
"[95/100][190/391] Loss_D: 2.7450 Loss_G: 3.0660 D(x): 0.7031 D(G(z)): 0.4272 / 0.3816\n",
"[95/100][191/391] Loss_D: 3.1206 Loss_G: 2.9545 D(x): 0.6697 D(G(z)): 0.4552 / 0.4037\n",
"[95/100][192/391] Loss_D: 2.4140 Loss_G: 2.6812 D(x): 0.6243 D(G(z)): 0.2675 / 0.4366\n",
"[95/100][193/391] Loss_D: 2.9034 Loss_G: 2.6777 D(x): 0.6100 D(G(z)): 0.4299 / 0.4497\n",
"[95/100][194/391] Loss_D: 3.2986 Loss_G: 2.6436 D(x): 0.5900 D(G(z)): 0.4467 / 0.4321\n",
"[95/100][195/391] Loss_D: 3.6379 Loss_G: 3.0433 D(x): 0.5857 D(G(z)): 0.5508 / 0.3996\n",
"[95/100][196/391] Loss_D: 2.4864 Loss_G: 2.6577 D(x): 0.7448 D(G(z)): 0.4512 / 0.4427\n",
"[95/100][197/391] Loss_D: 2.5165 Loss_G: 3.4529 D(x): 0.7756 D(G(z)): 0.3562 / 0.3370\n",
"[95/100][198/391] Loss_D: 2.7627 Loss_G: 3.3532 D(x): 0.6797 D(G(z)): 0.4294 / 0.3518\n",
"[95/100][199/391] Loss_D: 3.5096 Loss_G: 2.1079 D(x): 0.5034 D(G(z)): 0.3685 / 0.5253\n",
"[95/100][200/391] Loss_D: 2.9712 Loss_G: 2.4084 D(x): 0.6968 D(G(z)): 0.4927 / 0.4912\n",
"[95/100][201/391] Loss_D: 2.9476 Loss_G: 3.5264 D(x): 0.6817 D(G(z)): 0.4174 / 0.3339\n",
"[95/100][202/391] Loss_D: 3.6070 Loss_G: 3.0902 D(x): 0.6111 D(G(z)): 0.5708 / 0.3955\n",
"[95/100][203/391] Loss_D: 2.6175 Loss_G: 2.7464 D(x): 0.7118 D(G(z)): 0.3965 / 0.4312\n",
"[95/100][204/391] Loss_D: 2.2640 Loss_G: 3.1470 D(x): 0.7362 D(G(z)): 0.4137 / 0.3854\n",
"[95/100][205/391] Loss_D: 2.7462 Loss_G: 3.8530 D(x): 0.6679 D(G(z)): 0.4423 / 0.2952\n",
"[95/100][206/391] Loss_D: 2.7294 Loss_G: 3.3088 D(x): 0.6325 D(G(z)): 0.3480 / 0.3784\n",
"[95/100][207/391] Loss_D: 2.9478 Loss_G: 2.5925 D(x): 0.5889 D(G(z)): 0.3824 / 0.4560\n",
"[95/100][208/391] Loss_D: 2.5152 Loss_G: 3.1573 D(x): 0.7422 D(G(z)): 0.4648 / 0.3791\n",
"[95/100][209/391] Loss_D: 2.7780 Loss_G: 3.3949 D(x): 0.6741 D(G(z)): 0.4290 / 0.3652\n",
"[95/100][210/391] Loss_D: 2.8371 Loss_G: 3.7274 D(x): 0.6935 D(G(z)): 0.4457 / 0.3338\n",
"[95/100][211/391] Loss_D: 3.7008 Loss_G: 2.3595 D(x): 0.7036 D(G(z)): 0.5166 / 0.4984\n",
"[95/100][212/391] Loss_D: 3.7951 Loss_G: 2.3363 D(x): 0.4776 D(G(z)): 0.4041 / 0.4863\n",
"[95/100][213/391] Loss_D: 2.6331 Loss_G: 2.5548 D(x): 0.7935 D(G(z)): 0.4413 / 0.4602\n",
"[95/100][214/391] Loss_D: 2.7257 Loss_G: 3.5618 D(x): 0.6559 D(G(z)): 0.3965 / 0.3384\n",
"[95/100][215/391] Loss_D: 3.1928 Loss_G: 2.4195 D(x): 0.5662 D(G(z)): 0.3716 / 0.4844\n",
"[95/100][216/391] Loss_D: 2.8498 Loss_G: 2.7010 D(x): 0.6654 D(G(z)): 0.4272 / 0.4396\n",
"[95/100][217/391] Loss_D: 2.7099 Loss_G: 2.5410 D(x): 0.7504 D(G(z)): 0.4632 / 0.4665\n",
"[95/100][218/391] Loss_D: 3.2481 Loss_G: 2.4893 D(x): 0.5312 D(G(z)): 0.3016 / 0.4578\n",
"[95/100][219/391] Loss_D: 2.8562 Loss_G: 3.5496 D(x): 0.7142 D(G(z)): 0.5188 / 0.3399\n",
"[95/100][220/391] Loss_D: 2.5945 Loss_G: 2.2254 D(x): 0.8186 D(G(z)): 0.4901 / 0.5172\n",
"[95/100][221/391] Loss_D: 2.7390 Loss_G: 2.6480 D(x): 0.7148 D(G(z)): 0.4551 / 0.4552\n",
"[95/100][222/391] Loss_D: 3.4577 Loss_G: 2.8464 D(x): 0.6760 D(G(z)): 0.5776 / 0.4134\n",
"[95/100][223/391] Loss_D: 3.2789 Loss_G: 3.1934 D(x): 0.5500 D(G(z)): 0.3528 / 0.3809\n",
"[95/100][224/391] Loss_D: 2.2961 Loss_G: 3.2319 D(x): 0.7277 D(G(z)): 0.3607 / 0.3713\n",
"[95/100][225/391] Loss_D: 2.7334 Loss_G: 2.9467 D(x): 0.6048 D(G(z)): 0.3036 / 0.4209\n",
"[95/100][226/391] Loss_D: 3.0553 Loss_G: 2.1487 D(x): 0.6147 D(G(z)): 0.4822 / 0.5055\n",
"[95/100][227/391] Loss_D: 3.1342 Loss_G: 2.3061 D(x): 0.6046 D(G(z)): 0.4347 / 0.4910\n",
"[95/100][228/391] Loss_D: 3.1377 Loss_G: 3.0786 D(x): 0.5996 D(G(z)): 0.4024 / 0.3868\n",
"[95/100][229/391] Loss_D: 3.1812 Loss_G: 2.4594 D(x): 0.6527 D(G(z)): 0.5454 / 0.4760\n",
"[95/100][230/391] Loss_D: 2.8944 Loss_G: 2.9967 D(x): 0.6446 D(G(z)): 0.5043 / 0.4038\n",
"[95/100][231/391] Loss_D: 2.6854 Loss_G: 3.0089 D(x): 0.7301 D(G(z)): 0.4184 / 0.4068\n",
"[95/100][232/391] Loss_D: 1.9228 Loss_G: 2.1718 D(x): 0.7665 D(G(z)): 0.2586 / 0.5209\n",
"[95/100][233/391] Loss_D: 3.1255 Loss_G: 2.7244 D(x): 0.6422 D(G(z)): 0.4973 / 0.4252\n",
"[95/100][234/391] Loss_D: 2.7019 Loss_G: 2.0712 D(x): 0.6582 D(G(z)): 0.4191 / 0.5378\n",
"[95/100][235/391] Loss_D: 2.8872 Loss_G: 2.7001 D(x): 0.6788 D(G(z)): 0.4747 / 0.4175\n",
"[95/100][236/391] Loss_D: 2.7199 Loss_G: 2.6848 D(x): 0.6367 D(G(z)): 0.3375 / 0.4252\n",
"[95/100][237/391] Loss_D: 3.2749 Loss_G: 3.0387 D(x): 0.6821 D(G(z)): 0.5385 / 0.4079\n",
"[95/100][238/391] Loss_D: 3.0095 Loss_G: 2.7073 D(x): 0.6563 D(G(z)): 0.5062 / 0.4341\n",
"[95/100][239/391] Loss_D: 3.2048 Loss_G: 2.2905 D(x): 0.6218 D(G(z)): 0.4924 / 0.5109\n",
"[95/100][240/391] Loss_D: 2.9819 Loss_G: 2.2625 D(x): 0.6051 D(G(z)): 0.3985 / 0.5049\n",
"[95/100][241/391] Loss_D: 3.8791 Loss_G: 2.6341 D(x): 0.6457 D(G(z)): 0.4390 / 0.4529\n",
"[95/100][242/391] Loss_D: 2.9518 Loss_G: 3.2606 D(x): 0.6435 D(G(z)): 0.4486 / 0.3787\n",
"[95/100][243/391] Loss_D: 2.4597 Loss_G: 2.8758 D(x): 0.7672 D(G(z)): 0.4152 / 0.4118\n",
"[95/100][244/391] Loss_D: 2.0910 Loss_G: 3.8641 D(x): 0.7706 D(G(z)): 0.3673 / 0.3042\n",
"[95/100][245/391] Loss_D: 2.4642 Loss_G: 2.7033 D(x): 0.7598 D(G(z)): 0.4119 / 0.4386\n",
"[95/100][246/391] Loss_D: 3.0922 Loss_G: 3.5679 D(x): 0.5908 D(G(z)): 0.3819 / 0.3349\n",
"[95/100][247/391] Loss_D: 2.9818 Loss_G: 2.5436 D(x): 0.5870 D(G(z)): 0.3540 / 0.4512\n",
"[95/100][248/391] Loss_D: 2.5366 Loss_G: 2.7698 D(x): 0.7127 D(G(z)): 0.4427 / 0.4425\n",
"[95/100][249/391] Loss_D: 3.1081 Loss_G: 1.8070 D(x): 0.6040 D(G(z)): 0.4253 / 0.5814\n",
"[95/100][250/391] Loss_D: 3.4229 Loss_G: 3.0852 D(x): 0.5857 D(G(z)): 0.4459 / 0.3927\n",
"[95/100][251/391] Loss_D: 2.7542 Loss_G: 2.2299 D(x): 0.7156 D(G(z)): 0.4212 / 0.5136\n",
"[95/100][252/391] Loss_D: 2.7254 Loss_G: 2.6210 D(x): 0.7183 D(G(z)): 0.4345 / 0.4465\n",
"[95/100][253/391] Loss_D: 2.7592 Loss_G: 2.9535 D(x): 0.6957 D(G(z)): 0.4142 / 0.4021\n",
"[95/100][254/391] Loss_D: 2.1342 Loss_G: 2.8378 D(x): 0.7778 D(G(z)): 0.3968 / 0.4312\n",
"[95/100][255/391] Loss_D: 3.0278 Loss_G: 2.5805 D(x): 0.6489 D(G(z)): 0.4431 / 0.4439\n",
"[95/100][256/391] Loss_D: 2.4781 Loss_G: 4.8529 D(x): 0.6776 D(G(z)): 0.3602 / 0.2198\n",
"[95/100][257/391] Loss_D: 3.7053 Loss_G: 2.4901 D(x): 0.5252 D(G(z)): 0.4570 / 0.4734\n",
"[95/100][258/391] Loss_D: 2.8052 Loss_G: 2.1026 D(x): 0.6498 D(G(z)): 0.3908 / 0.5066\n",
"[95/100][259/391] Loss_D: 3.0837 Loss_G: 2.1785 D(x): 0.6126 D(G(z)): 0.4620 / 0.5081\n",
"[95/100][260/391] Loss_D: 3.1884 Loss_G: 2.6214 D(x): 0.6822 D(G(z)): 0.5273 / 0.4469\n",
"[95/100][261/391] Loss_D: 3.0829 Loss_G: 1.9856 D(x): 0.6377 D(G(z)): 0.4406 / 0.5565\n",
"[95/100][262/391] Loss_D: 3.5292 Loss_G: 2.5957 D(x): 0.6174 D(G(z)): 0.4980 / 0.4628\n",
"[95/100][263/391] Loss_D: 3.1739 Loss_G: 2.4441 D(x): 0.6239 D(G(z)): 0.4566 / 0.4637\n",
"[95/100][264/391] Loss_D: 2.6856 Loss_G: 2.9060 D(x): 0.6459 D(G(z)): 0.4106 / 0.4073\n",
"[95/100][265/391] Loss_D: 3.1118 Loss_G: 2.9570 D(x): 0.5896 D(G(z)): 0.4278 / 0.4166\n",
"[95/100][266/391] Loss_D: 3.2014 Loss_G: 2.8229 D(x): 0.6392 D(G(z)): 0.4272 / 0.4229\n",
"[95/100][267/391] Loss_D: 2.5579 Loss_G: 3.3133 D(x): 0.7183 D(G(z)): 0.3946 / 0.3678\n",
"[95/100][268/391] Loss_D: 2.3097 Loss_G: 1.9287 D(x): 0.6860 D(G(z)): 0.3444 / 0.5416\n",
"[95/100][269/391] Loss_D: 3.5082 Loss_G: 2.1521 D(x): 0.6016 D(G(z)): 0.4977 / 0.5253\n",
"[95/100][270/391] Loss_D: 2.7762 Loss_G: 2.3938 D(x): 0.6903 D(G(z)): 0.4435 / 0.5027\n",
"[95/100][271/391] Loss_D: 4.1298 Loss_G: 2.7068 D(x): 0.4641 D(G(z)): 0.5511 / 0.4472\n",
"[95/100][272/391] Loss_D: 2.9028 Loss_G: 2.0169 D(x): 0.7151 D(G(z)): 0.4774 / 0.5402\n",
"[95/100][273/391] Loss_D: 2.7938 Loss_G: 2.8213 D(x): 0.6862 D(G(z)): 0.4649 / 0.4136\n",
"[95/100][274/391] Loss_D: 2.9176 Loss_G: 2.8533 D(x): 0.6636 D(G(z)): 0.4497 / 0.4008\n",
"[95/100][275/391] Loss_D: 2.7677 Loss_G: 2.2343 D(x): 0.7166 D(G(z)): 0.4232 / 0.4991\n",
"[95/100][276/391] Loss_D: 2.5600 Loss_G: 2.8851 D(x): 0.6896 D(G(z)): 0.3539 / 0.4135\n",
"[95/100][277/391] Loss_D: 2.9771 Loss_G: 2.6581 D(x): 0.6563 D(G(z)): 0.4547 / 0.4411\n",
"[95/100][278/391] Loss_D: 2.5106 Loss_G: 2.1390 D(x): 0.6599 D(G(z)): 0.3597 / 0.5196\n",
"[95/100][279/391] Loss_D: 2.5890 Loss_G: 2.8424 D(x): 0.6975 D(G(z)): 0.4131 / 0.4276\n",
"[95/100][280/391] Loss_D: 3.1059 Loss_G: 2.9363 D(x): 0.6793 D(G(z)): 0.5113 / 0.4060\n",
"[95/100][281/391] Loss_D: 2.9187 Loss_G: 2.4961 D(x): 0.6756 D(G(z)): 0.4558 / 0.4797\n",
"[95/100][282/391] Loss_D: 3.2034 Loss_G: 2.8124 D(x): 0.6096 D(G(z)): 0.4590 / 0.4347\n",
"[95/100][283/391] Loss_D: 3.0931 Loss_G: 3.6564 D(x): 0.5928 D(G(z)): 0.4085 / 0.3364\n",
"[95/100][284/391] Loss_D: 2.7947 Loss_G: 2.7275 D(x): 0.6114 D(G(z)): 0.3844 / 0.4340\n",
"[95/100][285/391] Loss_D: 2.7289 Loss_G: 2.5984 D(x): 0.7360 D(G(z)): 0.4516 / 0.4533\n",
"[95/100][286/391] Loss_D: 3.0221 Loss_G: 2.4669 D(x): 0.6806 D(G(z)): 0.4518 / 0.4676\n",
"[95/100][287/391] Loss_D: 2.9412 Loss_G: 3.2947 D(x): 0.6549 D(G(z)): 0.4130 / 0.3597\n",
"[95/100][288/391] Loss_D: 2.8529 Loss_G: 3.4287 D(x): 0.6524 D(G(z)): 0.4441 / 0.3416\n",
"[95/100][289/391] Loss_D: 2.4751 Loss_G: 3.1618 D(x): 0.7340 D(G(z)): 0.3595 / 0.3881\n",
"[95/100][290/391] Loss_D: 2.6715 Loss_G: 3.3331 D(x): 0.6891 D(G(z)): 0.3709 / 0.3555\n",
"[95/100][291/391] Loss_D: 3.1869 Loss_G: 2.3582 D(x): 0.5522 D(G(z)): 0.3436 / 0.4902\n",
"[95/100][292/391] Loss_D: 4.1732 Loss_G: 1.9493 D(x): 0.6235 D(G(z)): 0.6287 / 0.5589\n",
"[95/100][293/391] Loss_D: 2.6704 Loss_G: 2.2512 D(x): 0.7433 D(G(z)): 0.4738 / 0.4879\n",
"[95/100][294/391] Loss_D: 3.0309 Loss_G: 3.4339 D(x): 0.6277 D(G(z)): 0.4610 / 0.3596\n",
"[95/100][295/391] Loss_D: 2.4478 Loss_G: 3.0126 D(x): 0.7226 D(G(z)): 0.3474 / 0.4120\n",
"[95/100][296/391] Loss_D: 2.9916 Loss_G: 2.2816 D(x): 0.7033 D(G(z)): 0.4882 / 0.4877\n",
"[95/100][297/391] Loss_D: 2.8091 Loss_G: 3.4463 D(x): 0.6916 D(G(z)): 0.3811 / 0.3338\n",
"[95/100][298/391] Loss_D: 2.8512 Loss_G: 2.5548 D(x): 0.6984 D(G(z)): 0.4989 / 0.4749\n",
"[95/100][299/391] Loss_D: 2.9213 Loss_G: 3.2072 D(x): 0.6805 D(G(z)): 0.4276 / 0.3912\n",
"[95/100][300/391] Loss_D: 3.4608 Loss_G: 3.2404 D(x): 0.5556 D(G(z)): 0.4532 / 0.3936\n",
"[95/100][301/391] Loss_D: 3.6047 Loss_G: 2.2307 D(x): 0.6304 D(G(z)): 0.3696 / 0.5129\n",
"[95/100][302/391] Loss_D: 2.5578 Loss_G: 2.2099 D(x): 0.6532 D(G(z)): 0.3668 / 0.4968\n",
"[95/100][303/391] Loss_D: 2.9269 Loss_G: 2.6175 D(x): 0.6894 D(G(z)): 0.4881 / 0.4581\n",
"[95/100][304/391] Loss_D: 2.4510 Loss_G: 2.2771 D(x): 0.6756 D(G(z)): 0.4155 / 0.5157\n",
"[95/100][305/391] Loss_D: 2.7478 Loss_G: 2.3707 D(x): 0.7301 D(G(z)): 0.4410 / 0.4702\n",
"[95/100][306/391] Loss_D: 3.0055 Loss_G: 2.9791 D(x): 0.6294 D(G(z)): 0.4084 / 0.4021\n",
"[95/100][307/391] Loss_D: 2.8780 Loss_G: 3.0255 D(x): 0.6243 D(G(z)): 0.3962 / 0.3906\n",
"[95/100][308/391] Loss_D: 2.4325 Loss_G: 2.3854 D(x): 0.7129 D(G(z)): 0.3565 / 0.4925\n",
"[95/100][309/391] Loss_D: 2.7865 Loss_G: 3.8783 D(x): 0.6909 D(G(z)): 0.4713 / 0.3238\n",
"[95/100][310/391] Loss_D: 3.1048 Loss_G: 3.2651 D(x): 0.6116 D(G(z)): 0.4030 / 0.3721\n",
"[95/100][311/391] Loss_D: 3.3545 Loss_G: 2.8823 D(x): 0.6844 D(G(z)): 0.5107 / 0.4209\n",
"[95/100][312/391] Loss_D: 2.2882 Loss_G: 3.2046 D(x): 0.7401 D(G(z)): 0.3351 / 0.3812\n",
"[95/100][313/391] Loss_D: 2.7925 Loss_G: 3.1609 D(x): 0.6762 D(G(z)): 0.4552 / 0.3763\n",
"[95/100][314/391] Loss_D: 3.1858 Loss_G: 2.9640 D(x): 0.6462 D(G(z)): 0.5028 / 0.4055\n",
"[95/100][315/391] Loss_D: 2.6996 Loss_G: 2.8246 D(x): 0.6625 D(G(z)): 0.3686 / 0.4287\n",
"[95/100][316/391] Loss_D: 2.7296 Loss_G: 2.4921 D(x): 0.6848 D(G(z)): 0.3844 / 0.4478\n",
"[95/100][317/391] Loss_D: 2.9532 Loss_G: 3.1968 D(x): 0.5834 D(G(z)): 0.3179 / 0.3601\n",
"[95/100][318/391] Loss_D: 2.7565 Loss_G: 2.9548 D(x): 0.6408 D(G(z)): 0.4004 / 0.3968\n",
"[95/100][319/391] Loss_D: 2.7641 Loss_G: 2.4315 D(x): 0.7469 D(G(z)): 0.4908 / 0.4932\n",
"[95/100][320/391] Loss_D: 2.8938 Loss_G: 3.0501 D(x): 0.7555 D(G(z)): 0.5039 / 0.3963\n",
"[95/100][321/391] Loss_D: 3.2463 Loss_G: 3.2442 D(x): 0.6968 D(G(z)): 0.5117 / 0.3811\n",
"[95/100][322/391] Loss_D: 2.7265 Loss_G: 3.1786 D(x): 0.6508 D(G(z)): 0.3921 / 0.3860\n",
"[95/100][323/391] Loss_D: 3.4147 Loss_G: 2.7128 D(x): 0.5447 D(G(z)): 0.4399 / 0.4467\n",
"[95/100][324/391] Loss_D: 2.7882 Loss_G: 3.9295 D(x): 0.6503 D(G(z)): 0.4192 / 0.3089\n",
"[95/100][325/391] Loss_D: 2.6514 Loss_G: 3.1612 D(x): 0.6531 D(G(z)): 0.3157 / 0.3893\n",
"[95/100][326/391] Loss_D: 2.6103 Loss_G: 3.6015 D(x): 0.7692 D(G(z)): 0.4495 / 0.3348\n",
"[95/100][327/391] Loss_D: 2.8088 Loss_G: 3.5014 D(x): 0.6767 D(G(z)): 0.4276 / 0.3414\n",
"[95/100][328/391] Loss_D: 2.5445 Loss_G: 2.0569 D(x): 0.6498 D(G(z)): 0.3313 / 0.5376\n",
"[95/100][329/391] Loss_D: 2.8484 Loss_G: 3.3986 D(x): 0.6980 D(G(z)): 0.4649 / 0.3717\n",
"[95/100][330/391] Loss_D: 2.4860 Loss_G: 2.5604 D(x): 0.7543 D(G(z)): 0.3872 / 0.4573\n",
"[95/100][331/391] Loss_D: 3.4529 Loss_G: 3.9464 D(x): 0.6351 D(G(z)): 0.3553 / 0.3112\n",
"[95/100][332/391] Loss_D: 2.5251 Loss_G: 2.4876 D(x): 0.7107 D(G(z)): 0.4090 / 0.4794\n",
"[95/100][333/391] Loss_D: 3.1487 Loss_G: 3.4665 D(x): 0.6088 D(G(z)): 0.4067 / 0.3399\n",
"[95/100][334/391] Loss_D: 2.5817 Loss_G: 2.7502 D(x): 0.7055 D(G(z)): 0.4507 / 0.4350\n",
"[95/100][335/391] Loss_D: 3.1160 Loss_G: 2.5943 D(x): 0.6818 D(G(z)): 0.4991 / 0.4427\n",
"[95/100][336/391] Loss_D: 3.6379 Loss_G: 3.8916 D(x): 0.6035 D(G(z)): 0.4848 / 0.2973\n",
"[95/100][337/391] Loss_D: 2.8280 Loss_G: 2.8222 D(x): 0.6138 D(G(z)): 0.3524 / 0.4057\n",
"[95/100][338/391] Loss_D: 2.6981 Loss_G: 2.0944 D(x): 0.6403 D(G(z)): 0.4020 / 0.5413\n",
"[95/100][339/391] Loss_D: 2.7362 Loss_G: 2.5401 D(x): 0.7187 D(G(z)): 0.4428 / 0.4598\n",
"[95/100][340/391] Loss_D: 3.1979 Loss_G: 2.7646 D(x): 0.6176 D(G(z)): 0.4774 / 0.4351\n",
"[95/100][341/391] Loss_D: 2.4375 Loss_G: 2.4998 D(x): 0.7359 D(G(z)): 0.3792 / 0.4586\n",
"[95/100][342/391] Loss_D: 2.7560 Loss_G: 2.3085 D(x): 0.6856 D(G(z)): 0.4328 / 0.4803\n",
"[95/100][343/391] Loss_D: 2.9129 Loss_G: 2.9747 D(x): 0.6202 D(G(z)): 0.3269 / 0.4115\n",
"[95/100][344/391] Loss_D: 2.1434 Loss_G: 4.2495 D(x): 0.7418 D(G(z)): 0.3244 / 0.2718\n",
"[95/100][345/391] Loss_D: 3.0973 Loss_G: 2.3863 D(x): 0.7030 D(G(z)): 0.5231 / 0.4907\n",
"[95/100][346/391] Loss_D: 2.4991 Loss_G: 3.5179 D(x): 0.7327 D(G(z)): 0.3621 / 0.3430\n",
"[95/100][347/391] Loss_D: 3.2032 Loss_G: 2.8281 D(x): 0.5814 D(G(z)): 0.4260 / 0.4232\n",
"[95/100][348/391] Loss_D: 2.7902 Loss_G: 2.3225 D(x): 0.5866 D(G(z)): 0.2971 / 0.4915\n",
"[95/100][349/391] Loss_D: 2.9949 Loss_G: 2.4920 D(x): 0.6807 D(G(z)): 0.5348 / 0.4666\n",
"[95/100][350/391] Loss_D: 2.6741 Loss_G: 2.1697 D(x): 0.7389 D(G(z)): 0.4719 / 0.5266\n",
"[95/100][351/391] Loss_D: 2.9395 Loss_G: 3.0661 D(x): 0.7423 D(G(z)): 0.4752 / 0.3980\n",
"[95/100][352/391] Loss_D: 2.3983 Loss_G: 3.6592 D(x): 0.7801 D(G(z)): 0.4595 / 0.3254\n",
"[95/100][353/391] Loss_D: 2.9644 Loss_G: 2.7357 D(x): 0.6028 D(G(z)): 0.3529 / 0.4265\n",
"[95/100][354/391] Loss_D: 2.5723 Loss_G: 2.8572 D(x): 0.6409 D(G(z)): 0.3914 / 0.4127\n",
"[95/100][355/391] Loss_D: 2.8988 Loss_G: 3.2450 D(x): 0.6062 D(G(z)): 0.4168 / 0.3817\n",
"[95/100][356/391] Loss_D: 2.8385 Loss_G: 3.2205 D(x): 0.6738 D(G(z)): 0.4356 / 0.3828\n",
"[95/100][357/391] Loss_D: 3.5214 Loss_G: 2.3616 D(x): 0.5395 D(G(z)): 0.4591 / 0.4911\n",
"[95/100][358/391] Loss_D: 2.5814 Loss_G: 2.7769 D(x): 0.7000 D(G(z)): 0.4313 / 0.4233\n",
"[95/100][359/391] Loss_D: 2.4995 Loss_G: 3.1857 D(x): 0.7732 D(G(z)): 0.4298 / 0.3817\n",
"[95/100][360/391] Loss_D: 2.7149 Loss_G: 2.9378 D(x): 0.6393 D(G(z)): 0.3519 / 0.4137\n",
"[95/100][361/391] Loss_D: 3.6397 Loss_G: 2.2209 D(x): 0.7520 D(G(z)): 0.4087 / 0.5089\n",
"[95/100][362/391] Loss_D: 2.5641 Loss_G: 2.6958 D(x): 0.7132 D(G(z)): 0.3610 / 0.4375\n",
"[95/100][363/391] Loss_D: 3.0779 Loss_G: 3.1115 D(x): 0.6174 D(G(z)): 0.4293 / 0.3766\n",
"[95/100][364/391] Loss_D: 2.5967 Loss_G: 2.5802 D(x): 0.7396 D(G(z)): 0.3838 / 0.4399\n",
"[95/100][365/391] Loss_D: 2.8421 Loss_G: 2.5051 D(x): 0.6646 D(G(z)): 0.4131 / 0.4793\n",
"[95/100][366/391] Loss_D: 2.8317 Loss_G: 3.8838 D(x): 0.6542 D(G(z)): 0.4525 / 0.3014\n",
"[95/100][367/391] Loss_D: 2.9384 Loss_G: 2.1539 D(x): 0.6355 D(G(z)): 0.4015 / 0.5188\n",
"[95/100][368/391] Loss_D: 2.4366 Loss_G: 3.2295 D(x): 0.6826 D(G(z)): 0.3333 / 0.3664\n",
"[95/100][369/391] Loss_D: 3.0386 Loss_G: 2.8446 D(x): 0.6612 D(G(z)): 0.5188 / 0.4256\n",
"[95/100][370/391] Loss_D: 2.7455 Loss_G: 2.5757 D(x): 0.7535 D(G(z)): 0.4905 / 0.4700\n",
"[95/100][371/391] Loss_D: 2.3975 Loss_G: 2.5238 D(x): 0.6550 D(G(z)): 0.2631 / 0.4733\n",
"[95/100][372/391] Loss_D: 2.7008 Loss_G: 2.5092 D(x): 0.6412 D(G(z)): 0.3820 / 0.4580\n",
"[95/100][373/391] Loss_D: 2.6424 Loss_G: 2.4825 D(x): 0.6729 D(G(z)): 0.3626 / 0.4706\n",
"[95/100][374/391] Loss_D: 2.6132 Loss_G: 2.7096 D(x): 0.6770 D(G(z)): 0.3775 / 0.4223\n",
"[95/100][375/391] Loss_D: 3.0771 Loss_G: 1.9416 D(x): 0.7020 D(G(z)): 0.5003 / 0.5561\n",
"[95/100][376/391] Loss_D: 2.7347 Loss_G: 2.2387 D(x): 0.6768 D(G(z)): 0.3599 / 0.5195\n",
"[95/100][377/391] Loss_D: 2.5595 Loss_G: 2.2885 D(x): 0.7549 D(G(z)): 0.3765 / 0.5019\n",
"[95/100][378/391] Loss_D: 2.5195 Loss_G: 3.1524 D(x): 0.7305 D(G(z)): 0.4777 / 0.3724\n",
"[95/100][379/391] Loss_D: 3.2055 Loss_G: 2.3880 D(x): 0.6396 D(G(z)): 0.5034 / 0.4789\n",
"[95/100][380/391] Loss_D: 2.4194 Loss_G: 2.6310 D(x): 0.6955 D(G(z)): 0.3571 / 0.4508\n",
"[95/100][381/391] Loss_D: 2.5389 Loss_G: 2.5628 D(x): 0.7611 D(G(z)): 0.4359 / 0.4715\n",
"[95/100][382/391] Loss_D: 2.7252 Loss_G: 2.2001 D(x): 0.6749 D(G(z)): 0.4126 / 0.5108\n",
"[95/100][383/391] Loss_D: 3.0703 Loss_G: 2.5071 D(x): 0.6233 D(G(z)): 0.4233 / 0.4791\n",
"[95/100][384/391] Loss_D: 2.9156 Loss_G: 3.0970 D(x): 0.5830 D(G(z)): 0.4041 / 0.3855\n",
"[95/100][385/391] Loss_D: 3.4058 Loss_G: 3.0151 D(x): 0.7019 D(G(z)): 0.5964 / 0.3950\n",
"[95/100][386/391] Loss_D: 2.7751 Loss_G: 3.6783 D(x): 0.6657 D(G(z)): 0.3592 / 0.3301\n",
"[95/100][387/391] Loss_D: 2.8007 Loss_G: 2.0490 D(x): 0.6621 D(G(z)): 0.3787 / 0.5290\n",
"[95/100][388/391] Loss_D: 2.5483 Loss_G: 3.0192 D(x): 0.7289 D(G(z)): 0.4829 / 0.4014\n",
"[95/100][389/391] Loss_D: 3.0092 Loss_G: 3.2913 D(x): 0.6944 D(G(z)): 0.4795 / 0.3745\n",
"[95/100][390/391] Loss_D: 2.6389 Loss_G: 3.2548 D(x): 0.7044 D(G(z)): 0.4573 / 0.3891\n",
"[95/100][391/391] Loss_D: 3.7792 Loss_G: 4.0371 D(x): 0.7031 D(G(z)): 0.3540 / 0.2983\n",
"[96/100][1/391] Loss_D: 3.5484 Loss_G: 2.6238 D(x): 0.6123 D(G(z)): 0.4106 / 0.4630\n",
"[96/100][2/391] Loss_D: 3.2478 Loss_G: 2.4548 D(x): 0.5060 D(G(z)): 0.3380 / 0.4682\n",
"[96/100][3/391] Loss_D: 2.7356 Loss_G: 2.0123 D(x): 0.6159 D(G(z)): 0.3408 / 0.5326\n",
"[96/100][4/391] Loss_D: 2.3370 Loss_G: 2.6130 D(x): 0.7889 D(G(z)): 0.4412 / 0.4519\n",
"[96/100][5/391] Loss_D: 2.9239 Loss_G: 2.9252 D(x): 0.6196 D(G(z)): 0.3953 / 0.4048\n",
"[96/100][6/391] Loss_D: 3.0300 Loss_G: 3.3808 D(x): 0.7361 D(G(z)): 0.4919 / 0.3564\n",
"[96/100][7/391] Loss_D: 2.6941 Loss_G: 2.2361 D(x): 0.7011 D(G(z)): 0.3598 / 0.4944\n",
"[96/100][8/391] Loss_D: 2.5459 Loss_G: 3.0339 D(x): 0.6767 D(G(z)): 0.4253 / 0.4002\n",
"[96/100][9/391] Loss_D: 2.5338 Loss_G: 2.7220 D(x): 0.7283 D(G(z)): 0.4425 / 0.4480\n",
"[96/100][10/391] Loss_D: 3.1105 Loss_G: 2.8656 D(x): 0.6375 D(G(z)): 0.4852 / 0.4098\n",
"[96/100][11/391] Loss_D: 3.5505 Loss_G: 2.7395 D(x): 0.6276 D(G(z)): 0.5485 / 0.4424\n",
"[96/100][12/391] Loss_D: 3.3368 Loss_G: 2.9962 D(x): 0.5361 D(G(z)): 0.4002 / 0.3939\n",
"[96/100][13/391] Loss_D: 2.5191 Loss_G: 3.0606 D(x): 0.7907 D(G(z)): 0.4264 / 0.3890\n",
"[96/100][14/391] Loss_D: 3.0109 Loss_G: 2.6814 D(x): 0.5754 D(G(z)): 0.4030 / 0.4435\n",
"[96/100][15/391] Loss_D: 2.5432 Loss_G: 2.4696 D(x): 0.6286 D(G(z)): 0.3446 / 0.4652\n",
"[96/100][16/391] Loss_D: 3.0194 Loss_G: 2.3703 D(x): 0.6975 D(G(z)): 0.4734 / 0.4701\n",
"[96/100][17/391] Loss_D: 2.9102 Loss_G: 2.5697 D(x): 0.6664 D(G(z)): 0.4680 / 0.4510\n",
"[96/100][18/391] Loss_D: 2.5908 Loss_G: 3.0163 D(x): 0.6805 D(G(z)): 0.4019 / 0.3918\n",
"[96/100][19/391] Loss_D: 3.6219 Loss_G: 3.0999 D(x): 0.5784 D(G(z)): 0.5383 / 0.3918\n",
"[96/100][20/391] Loss_D: 2.7750 Loss_G: 3.2634 D(x): 0.6008 D(G(z)): 0.3528 / 0.3775\n",
"[96/100][21/391] Loss_D: 3.1983 Loss_G: 2.7163 D(x): 0.7205 D(G(z)): 0.5776 / 0.4294\n",
"[96/100][22/391] Loss_D: 2.6778 Loss_G: 4.2720 D(x): 0.7432 D(G(z)): 0.4629 / 0.2857\n",
"[96/100][23/391] Loss_D: 2.9685 Loss_G: 2.7969 D(x): 0.5876 D(G(z)): 0.3889 / 0.4258\n",
"[96/100][24/391] Loss_D: 2.6981 Loss_G: 2.3372 D(x): 0.6699 D(G(z)): 0.3985 / 0.4827\n",
"[96/100][25/391] Loss_D: 3.0175 Loss_G: 3.6731 D(x): 0.6509 D(G(z)): 0.4416 / 0.3343\n",
"[96/100][26/391] Loss_D: 2.8532 Loss_G: 3.5019 D(x): 0.7163 D(G(z)): 0.4596 / 0.3449\n",
"[96/100][27/391] Loss_D: 3.0904 Loss_G: 2.7969 D(x): 0.6756 D(G(z)): 0.4445 / 0.4299\n",
"[96/100][28/391] Loss_D: 3.4011 Loss_G: 3.2791 D(x): 0.5269 D(G(z)): 0.4007 / 0.3676\n",
"[96/100][29/391] Loss_D: 2.6694 Loss_G: 2.6566 D(x): 0.6603 D(G(z)): 0.3420 / 0.4552\n",
"[96/100][30/391] Loss_D: 2.8286 Loss_G: 3.6372 D(x): 0.7030 D(G(z)): 0.4932 / 0.3552\n",
"[96/100][31/391] Loss_D: 3.6275 Loss_G: 3.7789 D(x): 0.6871 D(G(z)): 0.4883 / 0.3361\n",
"[96/100][32/391] Loss_D: 2.6739 Loss_G: 2.6568 D(x): 0.7268 D(G(z)): 0.4191 / 0.4626\n",
"[96/100][33/391] Loss_D: 3.0631 Loss_G: 2.8274 D(x): 0.6741 D(G(z)): 0.4679 / 0.4196\n",
"[96/100][34/391] Loss_D: 2.4938 Loss_G: 2.3886 D(x): 0.6651 D(G(z)): 0.3641 / 0.4634\n",
"[96/100][35/391] Loss_D: 3.4942 Loss_G: 2.4454 D(x): 0.5404 D(G(z)): 0.4759 / 0.4589\n",
"[96/100][36/391] Loss_D: 3.2999 Loss_G: 3.1243 D(x): 0.5681 D(G(z)): 0.4202 / 0.3819\n",
"[96/100][37/391] Loss_D: 2.8077 Loss_G: 3.2723 D(x): 0.7432 D(G(z)): 0.4837 / 0.3560\n",
"[96/100][38/391] Loss_D: 3.0623 Loss_G: 2.6459 D(x): 0.5722 D(G(z)): 0.3540 / 0.4467\n",
"[96/100][39/391] Loss_D: 2.9258 Loss_G: 2.4685 D(x): 0.7067 D(G(z)): 0.4621 / 0.4700\n",
"[96/100][40/391] Loss_D: 3.0908 Loss_G: 3.0377 D(x): 0.6530 D(G(z)): 0.3982 / 0.3909\n",
"[96/100][41/391] Loss_D: 3.0983 Loss_G: 2.9486 D(x): 0.6902 D(G(z)): 0.4771 / 0.4083\n",
"[96/100][42/391] Loss_D: 2.8352 Loss_G: 2.8126 D(x): 0.6510 D(G(z)): 0.4265 / 0.4277\n",
"[96/100][43/391] Loss_D: 2.8089 Loss_G: 2.5516 D(x): 0.6858 D(G(z)): 0.4135 / 0.4745\n",
"[96/100][44/391] Loss_D: 2.8439 Loss_G: 2.7957 D(x): 0.6208 D(G(z)): 0.4281 / 0.4175\n",
"[96/100][45/391] Loss_D: 3.1239 Loss_G: 3.2408 D(x): 0.6637 D(G(z)): 0.4801 / 0.3868\n",
"[96/100][46/391] Loss_D: 2.4332 Loss_G: 2.9450 D(x): 0.7510 D(G(z)): 0.3581 / 0.3944\n",
"[96/100][47/391] Loss_D: 2.5106 Loss_G: 2.5560 D(x): 0.6921 D(G(z)): 0.3752 / 0.4564\n",
"[96/100][48/391] Loss_D: 2.3162 Loss_G: 2.8917 D(x): 0.7260 D(G(z)): 0.3655 / 0.4283\n",
"[96/100][49/391] Loss_D: 2.1014 Loss_G: 2.2308 D(x): 0.7905 D(G(z)): 0.3751 / 0.5068\n",
"[96/100][50/391] Loss_D: 2.2050 Loss_G: 2.9350 D(x): 0.7359 D(G(z)): 0.2750 / 0.4269\n",
"[96/100][51/391] Loss_D: 2.8194 Loss_G: 2.8080 D(x): 0.6439 D(G(z)): 0.3822 / 0.4297\n",
"[96/100][52/391] Loss_D: 3.1635 Loss_G: 2.7979 D(x): 0.6437 D(G(z)): 0.5021 / 0.4415\n",
"[96/100][53/391] Loss_D: 2.6580 Loss_G: 2.9440 D(x): 0.6261 D(G(z)): 0.3694 / 0.4059\n",
"[96/100][54/391] Loss_D: 2.8634 Loss_G: 2.7536 D(x): 0.6420 D(G(z)): 0.4272 / 0.4274\n",
"[96/100][55/391] Loss_D: 3.0705 Loss_G: 2.2717 D(x): 0.6068 D(G(z)): 0.4511 / 0.4996\n",
"[96/100][56/391] Loss_D: 3.2473 Loss_G: 2.7050 D(x): 0.7140 D(G(z)): 0.5470 / 0.4387\n",
"[96/100][57/391] Loss_D: 3.2985 Loss_G: 2.4651 D(x): 0.5289 D(G(z)): 0.4048 / 0.4742\n",
"[96/100][58/391] Loss_D: 2.8142 Loss_G: 3.1449 D(x): 0.6840 D(G(z)): 0.4665 / 0.3893\n",
"[96/100][59/391] Loss_D: 2.5878 Loss_G: 1.8050 D(x): 0.7216 D(G(z)): 0.4322 / 0.5920\n",
"[96/100][60/391] Loss_D: 3.1599 Loss_G: 3.3497 D(x): 0.6526 D(G(z)): 0.5033 / 0.3730\n",
"[96/100][61/391] Loss_D: 3.6877 Loss_G: 4.0651 D(x): 0.6847 D(G(z)): 0.5122 / 0.2846\n",
"[96/100][62/391] Loss_D: 3.3941 Loss_G: 3.1152 D(x): 0.5956 D(G(z)): 0.4679 / 0.3898\n",
"[96/100][63/391] Loss_D: 2.9579 Loss_G: 2.7053 D(x): 0.7292 D(G(z)): 0.5069 / 0.4378\n",
"[96/100][64/391] Loss_D: 2.7673 Loss_G: 3.1251 D(x): 0.5925 D(G(z)): 0.3528 / 0.3876\n",
"[96/100][65/391] Loss_D: 3.3652 Loss_G: 3.0465 D(x): 0.6540 D(G(z)): 0.5713 / 0.4046\n",
"[96/100][66/391] Loss_D: 3.0436 Loss_G: 2.5604 D(x): 0.5798 D(G(z)): 0.3992 / 0.4673\n",
"[96/100][67/391] Loss_D: 3.0158 Loss_G: 2.5465 D(x): 0.6846 D(G(z)): 0.3891 / 0.4760\n",
"[96/100][68/391] Loss_D: 3.0985 Loss_G: 2.6119 D(x): 0.6021 D(G(z)): 0.4643 / 0.4609\n",
"[96/100][69/391] Loss_D: 2.8638 Loss_G: 2.8993 D(x): 0.6660 D(G(z)): 0.4412 / 0.4142\n",
"[96/100][70/391] Loss_D: 2.8854 Loss_G: 2.9847 D(x): 0.7062 D(G(z)): 0.4371 / 0.4104\n",
"[96/100][71/391] Loss_D: 3.9234 Loss_G: 3.3204 D(x): 0.5259 D(G(z)): 0.5146 / 0.3622\n",
"[96/100][72/391] Loss_D: 2.2797 Loss_G: 2.2409 D(x): 0.7629 D(G(z)): 0.3620 / 0.5039\n",
"[96/100][73/391] Loss_D: 3.1219 Loss_G: 2.3706 D(x): 0.6401 D(G(z)): 0.4776 / 0.4868\n",
"[96/100][74/391] Loss_D: 3.1173 Loss_G: 3.1886 D(x): 0.6523 D(G(z)): 0.4938 / 0.3726\n",
"[96/100][75/391] Loss_D: 2.9452 Loss_G: 2.8338 D(x): 0.6240 D(G(z)): 0.4330 / 0.4106\n",
"[96/100][76/391] Loss_D: 3.4244 Loss_G: 3.2292 D(x): 0.5897 D(G(z)): 0.4966 / 0.3572\n",
"[96/100][77/391] Loss_D: 2.8083 Loss_G: 4.2098 D(x): 0.6322 D(G(z)): 0.3512 / 0.2720\n",
"[96/100][78/391] Loss_D: 4.0456 Loss_G: 1.8345 D(x): 0.4529 D(G(z)): 0.4245 / 0.5637\n",
"[96/100][79/391] Loss_D: 3.5276 Loss_G: 2.2227 D(x): 0.7406 D(G(z)): 0.6563 / 0.5125\n",
"[96/100][80/391] Loss_D: 3.5293 Loss_G: 2.3585 D(x): 0.6950 D(G(z)): 0.5652 / 0.4890\n",
"[96/100][81/391] Loss_D: 2.7851 Loss_G: 2.8701 D(x): 0.7037 D(G(z)): 0.4567 / 0.4236\n",
"[96/100][82/391] Loss_D: 3.0409 Loss_G: 2.4490 D(x): 0.6491 D(G(z)): 0.4405 / 0.4859\n",
"[96/100][83/391] Loss_D: 3.0044 Loss_G: 2.7135 D(x): 0.6467 D(G(z)): 0.4330 / 0.4504\n",
"[96/100][84/391] Loss_D: 3.2380 Loss_G: 2.9678 D(x): 0.6046 D(G(z)): 0.4828 / 0.4095\n",
"[96/100][85/391] Loss_D: 3.3978 Loss_G: 2.7590 D(x): 0.5179 D(G(z)): 0.3724 / 0.4229\n",
"[96/100][86/391] Loss_D: 3.1650 Loss_G: 2.9923 D(x): 0.6455 D(G(z)): 0.4694 / 0.3813\n",
"[96/100][87/391] Loss_D: 2.9269 Loss_G: 3.8294 D(x): 0.7738 D(G(z)): 0.4492 / 0.3025\n",
"[96/100][88/391] Loss_D: 2.5364 Loss_G: 4.1536 D(x): 0.6776 D(G(z)): 0.3815 / 0.2822\n",
"[96/100][89/391] Loss_D: 3.2401 Loss_G: 3.0851 D(x): 0.6146 D(G(z)): 0.4789 / 0.3933\n",
"[96/100][90/391] Loss_D: 2.6145 Loss_G: 2.8128 D(x): 0.6414 D(G(z)): 0.2823 / 0.4372\n",
"[96/100][91/391] Loss_D: 3.5807 Loss_G: 2.5938 D(x): 0.7000 D(G(z)): 0.5155 / 0.4575\n",
"[96/100][92/391] Loss_D: 3.0567 Loss_G: 2.5347 D(x): 0.6409 D(G(z)): 0.4584 / 0.4675\n",
"[96/100][93/391] Loss_D: 2.5892 Loss_G: 3.6680 D(x): 0.7457 D(G(z)): 0.3738 / 0.3260\n",
"[96/100][94/391] Loss_D: 2.4856 Loss_G: 3.4289 D(x): 0.6456 D(G(z)): 0.3534 / 0.3554\n",
"[96/100][95/391] Loss_D: 2.7334 Loss_G: 3.4252 D(x): 0.6888 D(G(z)): 0.4037 / 0.3432\n",
"[96/100][96/391] Loss_D: 3.0614 Loss_G: 3.3600 D(x): 0.6217 D(G(z)): 0.4093 / 0.3348\n",
"[96/100][97/391] Loss_D: 2.7835 Loss_G: 2.6620 D(x): 0.7028 D(G(z)): 0.4232 / 0.4491\n",
"[96/100][98/391] Loss_D: 2.6457 Loss_G: 3.4127 D(x): 0.7097 D(G(z)): 0.4193 / 0.3469\n",
"[96/100][99/391] Loss_D: 2.9313 Loss_G: 2.3825 D(x): 0.6912 D(G(z)): 0.4593 / 0.4886\n",
"[96/100][100/391] Loss_D: 3.2421 Loss_G: 3.6215 D(x): 0.5557 D(G(z)): 0.4001 / 0.3374\n",
"[96/100][101/391] Loss_D: 2.7904 Loss_G: 3.4458 D(x): 0.7183 D(G(z)): 0.4708 / 0.3449\n",
"[96/100][102/391] Loss_D: 2.7133 Loss_G: 2.8006 D(x): 0.6705 D(G(z)): 0.3615 / 0.4290\n",
"[96/100][103/391] Loss_D: 2.9358 Loss_G: 2.0007 D(x): 0.6326 D(G(z)): 0.4256 / 0.5442\n",
"[96/100][104/391] Loss_D: 2.5415 Loss_G: 3.6411 D(x): 0.6793 D(G(z)): 0.3877 / 0.3336\n",
"[96/100][105/391] Loss_D: 2.9102 Loss_G: 3.0260 D(x): 0.6934 D(G(z)): 0.4430 / 0.3756\n",
"[96/100][106/391] Loss_D: 2.6566 Loss_G: 3.1852 D(x): 0.7037 D(G(z)): 0.4157 / 0.3775\n",
"[96/100][107/391] Loss_D: 2.7993 Loss_G: 3.3359 D(x): 0.6715 D(G(z)): 0.3913 / 0.3429\n",
"[96/100][108/391] Loss_D: 2.8662 Loss_G: 2.4319 D(x): 0.7003 D(G(z)): 0.4735 / 0.4717\n",
"[96/100][109/391] Loss_D: 2.7329 Loss_G: 1.7071 D(x): 0.6365 D(G(z)): 0.3794 / 0.5838\n",
"[96/100][110/391] Loss_D: 2.9408 Loss_G: 3.0338 D(x): 0.6914 D(G(z)): 0.4553 / 0.3965\n",
"[96/100][111/391] Loss_D: 2.8574 Loss_G: 3.0041 D(x): 0.7615 D(G(z)): 0.4928 / 0.4030\n",
"[96/100][112/391] Loss_D: 3.1228 Loss_G: 3.2054 D(x): 0.6131 D(G(z)): 0.4556 / 0.3960\n",
"[96/100][113/391] Loss_D: 2.9703 Loss_G: 2.9346 D(x): 0.7474 D(G(z)): 0.5167 / 0.4042\n",
"[96/100][114/391] Loss_D: 2.8848 Loss_G: 2.9156 D(x): 0.5943 D(G(z)): 0.3719 / 0.4229\n",
"[96/100][115/391] Loss_D: 2.6665 Loss_G: 3.0749 D(x): 0.6769 D(G(z)): 0.3316 / 0.4032\n",
"[96/100][116/391] Loss_D: 2.3108 Loss_G: 2.9077 D(x): 0.7041 D(G(z)): 0.2731 / 0.4174\n",
"[96/100][117/391] Loss_D: 3.3597 Loss_G: 3.0847 D(x): 0.6589 D(G(z)): 0.5244 / 0.3726\n",
"[96/100][118/391] Loss_D: 2.5765 Loss_G: 2.1273 D(x): 0.7192 D(G(z)): 0.4321 / 0.5212\n",
"[96/100][119/391] Loss_D: 2.5962 Loss_G: 2.6969 D(x): 0.6691 D(G(z)): 0.3336 / 0.4465\n",
"[96/100][120/391] Loss_D: 2.8697 Loss_G: 2.6157 D(x): 0.6253 D(G(z)): 0.3896 / 0.4568\n",
"[96/100][121/391] Loss_D: 3.5758 Loss_G: 2.8380 D(x): 0.6315 D(G(z)): 0.3610 / 0.4192\n",
"[96/100][122/391] Loss_D: 3.4797 Loss_G: 2.7288 D(x): 0.5464 D(G(z)): 0.4275 / 0.4206\n",
"[96/100][123/391] Loss_D: 3.0808 Loss_G: 2.7082 D(x): 0.7221 D(G(z)): 0.5151 / 0.4307\n",
"[96/100][124/391] Loss_D: 2.8916 Loss_G: 2.3760 D(x): 0.6590 D(G(z)): 0.4544 / 0.4786\n",
"[96/100][125/391] Loss_D: 2.9449 Loss_G: 2.8827 D(x): 0.6808 D(G(z)): 0.4563 / 0.4197\n",
"[96/100][126/391] Loss_D: 2.7639 Loss_G: 1.9270 D(x): 0.6399 D(G(z)): 0.3831 / 0.5425\n",
"[96/100][127/391] Loss_D: 2.7747 Loss_G: 2.3550 D(x): 0.7257 D(G(z)): 0.4244 / 0.5035\n",
"[96/100][128/391] Loss_D: 2.8525 Loss_G: 2.7178 D(x): 0.6039 D(G(z)): 0.3764 / 0.4409\n",
"[96/100][129/391] Loss_D: 2.6808 Loss_G: 3.0563 D(x): 0.7363 D(G(z)): 0.4672 / 0.3994\n",
"[96/100][130/391] Loss_D: 2.4950 Loss_G: 3.4731 D(x): 0.7502 D(G(z)): 0.3649 / 0.3536\n",
"[96/100][131/391] Loss_D: 2.5932 Loss_G: 2.8346 D(x): 0.7786 D(G(z)): 0.4563 / 0.4363\n",
"[96/100][132/391] Loss_D: 2.4138 Loss_G: 3.0140 D(x): 0.7174 D(G(z)): 0.3209 / 0.4093\n",
"[96/100][133/391] Loss_D: 2.9849 Loss_G: 3.3676 D(x): 0.6660 D(G(z)): 0.5199 / 0.3596\n",
"[96/100][134/391] Loss_D: 2.2956 Loss_G: 3.4772 D(x): 0.7244 D(G(z)): 0.3808 / 0.3509\n",
"[96/100][135/391] Loss_D: 2.4997 Loss_G: 3.8457 D(x): 0.7153 D(G(z)): 0.3852 / 0.3016\n",
"[96/100][136/391] Loss_D: 3.4976 Loss_G: 3.0832 D(x): 0.5447 D(G(z)): 0.4457 / 0.3808\n",
"[96/100][137/391] Loss_D: 3.0165 Loss_G: 3.0909 D(x): 0.5959 D(G(z)): 0.4138 / 0.3897\n",
"[96/100][138/391] Loss_D: 2.6533 Loss_G: 2.8413 D(x): 0.6987 D(G(z)): 0.4662 / 0.4207\n",
"[96/100][139/391] Loss_D: 2.9418 Loss_G: 4.0466 D(x): 0.6623 D(G(z)): 0.4267 / 0.2973\n",
"[96/100][140/391] Loss_D: 2.5134 Loss_G: 2.9970 D(x): 0.6834 D(G(z)): 0.3566 / 0.4073\n",
"[96/100][141/391] Loss_D: 3.1953 Loss_G: 3.3860 D(x): 0.6588 D(G(z)): 0.4967 / 0.3642\n",
"[96/100][142/391] Loss_D: 2.3760 Loss_G: 3.4587 D(x): 0.7615 D(G(z)): 0.3905 / 0.3610\n",
"[96/100][143/391] Loss_D: 2.6704 Loss_G: 4.4065 D(x): 0.6902 D(G(z)): 0.4040 / 0.2680\n",
"[96/100][144/391] Loss_D: 3.2802 Loss_G: 3.5116 D(x): 0.5620 D(G(z)): 0.4450 / 0.3542\n",
"[96/100][145/391] Loss_D: 2.4141 Loss_G: 2.8826 D(x): 0.7636 D(G(z)): 0.3269 / 0.4188\n",
"[96/100][146/391] Loss_D: 2.2622 Loss_G: 2.4998 D(x): 0.7820 D(G(z)): 0.3128 / 0.4584\n",
"[96/100][147/391] Loss_D: 3.2563 Loss_G: 3.7376 D(x): 0.6241 D(G(z)): 0.4467 / 0.3337\n",
"[96/100][148/391] Loss_D: 2.5705 Loss_G: 3.0552 D(x): 0.7454 D(G(z)): 0.5053 / 0.3908\n",
"[96/100][149/391] Loss_D: 2.9263 Loss_G: 3.4446 D(x): 0.7231 D(G(z)): 0.5082 / 0.3630\n",
"[96/100][150/391] Loss_D: 2.4171 Loss_G: 3.3975 D(x): 0.7065 D(G(z)): 0.2985 / 0.3590\n",
"[96/100][151/391] Loss_D: 3.5344 Loss_G: 2.5706 D(x): 0.6632 D(G(z)): 0.3658 / 0.4660\n",
"[96/100][152/391] Loss_D: 2.9530 Loss_G: 2.8234 D(x): 0.6245 D(G(z)): 0.4362 / 0.4335\n",
"[96/100][153/391] Loss_D: 3.1620 Loss_G: 3.1186 D(x): 0.5902 D(G(z)): 0.3967 / 0.3864\n",
"[96/100][154/391] Loss_D: 2.1276 Loss_G: 2.6080 D(x): 0.7783 D(G(z)): 0.3858 / 0.4392\n",
"[96/100][155/391] Loss_D: 3.2340 Loss_G: 3.5324 D(x): 0.5897 D(G(z)): 0.3882 / 0.3457\n",
"[96/100][156/391] Loss_D: 3.0741 Loss_G: 3.5992 D(x): 0.6696 D(G(z)): 0.4912 / 0.3213\n",
"[96/100][157/391] Loss_D: 2.8213 Loss_G: 3.3454 D(x): 0.6631 D(G(z)): 0.3813 / 0.3419\n",
"[96/100][158/391] Loss_D: 2.7341 Loss_G: 2.7515 D(x): 0.6480 D(G(z)): 0.3858 / 0.4281\n",
"[96/100][159/391] Loss_D: 2.5402 Loss_G: 3.1285 D(x): 0.7145 D(G(z)): 0.3674 / 0.3971\n",
"[96/100][160/391] Loss_D: 2.8628 Loss_G: 3.0438 D(x): 0.6663 D(G(z)): 0.4474 / 0.4095\n",
"[96/100][161/391] Loss_D: 3.1645 Loss_G: 2.9201 D(x): 0.6428 D(G(z)): 0.4777 / 0.4061\n",
"[96/100][162/391] Loss_D: 2.9886 Loss_G: 3.0962 D(x): 0.6179 D(G(z)): 0.4113 / 0.3843\n",
"[96/100][163/391] Loss_D: 2.6505 Loss_G: 1.9005 D(x): 0.7501 D(G(z)): 0.4453 / 0.5621\n",
"[96/100][164/391] Loss_D: 2.2492 Loss_G: 3.6672 D(x): 0.7453 D(G(z)): 0.3732 / 0.3260\n",
"[96/100][165/391] Loss_D: 2.8785 Loss_G: 3.1114 D(x): 0.6623 D(G(z)): 0.4138 / 0.3778\n",
"[96/100][166/391] Loss_D: 3.1529 Loss_G: 3.7599 D(x): 0.6081 D(G(z)): 0.4894 / 0.3182\n",
"[96/100][167/391] Loss_D: 3.6755 Loss_G: 2.6995 D(x): 0.5682 D(G(z)): 0.5369 / 0.4391\n",
"[96/100][168/391] Loss_D: 2.9760 Loss_G: 2.7965 D(x): 0.6224 D(G(z)): 0.3900 / 0.3975\n",
"[96/100][169/391] Loss_D: 3.0226 Loss_G: 3.2739 D(x): 0.6228 D(G(z)): 0.4544 / 0.3612\n",
"[96/100][170/391] Loss_D: 2.5980 Loss_G: 2.5583 D(x): 0.7355 D(G(z)): 0.4052 / 0.4723\n",
"[96/100][171/391] Loss_D: 2.7512 Loss_G: 2.3243 D(x): 0.6377 D(G(z)): 0.3342 / 0.5077\n",
"[96/100][172/391] Loss_D: 3.0014 Loss_G: 3.0676 D(x): 0.6899 D(G(z)): 0.4775 / 0.3957\n",
"[96/100][173/391] Loss_D: 2.6574 Loss_G: 2.4360 D(x): 0.6567 D(G(z)): 0.3581 / 0.4872\n",
"[96/100][174/391] Loss_D: 2.5543 Loss_G: 1.9838 D(x): 0.6715 D(G(z)): 0.3954 / 0.5295\n",
"[96/100][175/391] Loss_D: 2.9342 Loss_G: 3.3871 D(x): 0.7091 D(G(z)): 0.4941 / 0.3567\n",
"[96/100][176/391] Loss_D: 2.7951 Loss_G: 2.0229 D(x): 0.6799 D(G(z)): 0.4029 / 0.5260\n",
"[96/100][177/391] Loss_D: 2.8456 Loss_G: 2.6151 D(x): 0.7039 D(G(z)): 0.4266 / 0.4499\n",
"[96/100][178/391] Loss_D: 2.6531 Loss_G: 2.7448 D(x): 0.6866 D(G(z)): 0.4292 / 0.4400\n",
"[96/100][179/391] Loss_D: 2.5850 Loss_G: 3.6489 D(x): 0.7134 D(G(z)): 0.4072 / 0.3273\n",
"[96/100][180/391] Loss_D: 2.9411 Loss_G: 1.8638 D(x): 0.7259 D(G(z)): 0.5035 / 0.5663\n",
"[96/100][181/391] Loss_D: 3.6471 Loss_G: 3.2875 D(x): 0.6917 D(G(z)): 0.4684 / 0.3667\n",
"[96/100][182/391] Loss_D: 2.5303 Loss_G: 2.9102 D(x): 0.7030 D(G(z)): 0.4045 / 0.4263\n",
"[96/100][183/391] Loss_D: 2.7487 Loss_G: 2.4877 D(x): 0.7086 D(G(z)): 0.4774 / 0.4791\n",
"[96/100][184/391] Loss_D: 2.7290 Loss_G: 3.0912 D(x): 0.6102 D(G(z)): 0.2868 / 0.3972\n",
"[96/100][185/391] Loss_D: 3.1024 Loss_G: 2.8122 D(x): 0.5939 D(G(z)): 0.4121 / 0.4188\n",
"[96/100][186/391] Loss_D: 2.6257 Loss_G: 2.5914 D(x): 0.6662 D(G(z)): 0.3531 / 0.4613\n",
"[96/100][187/391] Loss_D: 3.2034 Loss_G: 2.8762 D(x): 0.5319 D(G(z)): 0.3694 / 0.4041\n",
"[96/100][188/391] Loss_D: 2.4977 Loss_G: 2.8902 D(x): 0.7601 D(G(z)): 0.4837 / 0.4117\n",
"[96/100][189/391] Loss_D: 2.2766 Loss_G: 2.4758 D(x): 0.7587 D(G(z)): 0.3966 / 0.4737\n",
"[96/100][190/391] Loss_D: 2.6213 Loss_G: 1.9670 D(x): 0.6649 D(G(z)): 0.4007 / 0.5568\n",
"[96/100][191/391] Loss_D: 3.4803 Loss_G: 3.0378 D(x): 0.6185 D(G(z)): 0.4915 / 0.4073\n",
"[96/100][192/391] Loss_D: 2.2860 Loss_G: 2.6845 D(x): 0.7803 D(G(z)): 0.4054 / 0.4440\n",
"[96/100][193/391] Loss_D: 3.0621 Loss_G: 3.1165 D(x): 0.5650 D(G(z)): 0.4120 / 0.3850\n",
"[96/100][194/391] Loss_D: 3.0307 Loss_G: 2.6098 D(x): 0.6960 D(G(z)): 0.4908 / 0.4434\n",
"[96/100][195/391] Loss_D: 3.2526 Loss_G: 4.2813 D(x): 0.5807 D(G(z)): 0.4436 / 0.2647\n",
"[96/100][196/391] Loss_D: 2.8206 Loss_G: 2.6725 D(x): 0.6235 D(G(z)): 0.4373 / 0.4355\n",
"[96/100][197/391] Loss_D: 3.0821 Loss_G: 2.6760 D(x): 0.6559 D(G(z)): 0.4517 / 0.4293\n",
"[96/100][198/391] Loss_D: 3.1945 Loss_G: 3.3557 D(x): 0.6600 D(G(z)): 0.5277 / 0.3758\n",
"[96/100][199/391] Loss_D: 2.5032 Loss_G: 2.4244 D(x): 0.6798 D(G(z)): 0.3791 / 0.4736\n",
"[96/100][200/391] Loss_D: 2.6305 Loss_G: 2.9801 D(x): 0.7322 D(G(z)): 0.4433 / 0.4119\n",
"[96/100][201/391] Loss_D: 3.1219 Loss_G: 2.9059 D(x): 0.6299 D(G(z)): 0.4265 / 0.4185\n",
"[96/100][202/391] Loss_D: 2.9043 Loss_G: 3.6723 D(x): 0.6369 D(G(z)): 0.4390 / 0.3375\n",
"[96/100][203/391] Loss_D: 2.7281 Loss_G: 3.3078 D(x): 0.7074 D(G(z)): 0.4187 / 0.3623\n",
"[96/100][204/391] Loss_D: 2.5555 Loss_G: 2.3689 D(x): 0.7090 D(G(z)): 0.4405 / 0.4685\n",
"[96/100][205/391] Loss_D: 2.5384 Loss_G: 2.0971 D(x): 0.6612 D(G(z)): 0.3620 / 0.5141\n",
"[96/100][206/391] Loss_D: 3.0007 Loss_G: 4.1088 D(x): 0.5775 D(G(z)): 0.3669 / 0.2861\n",
"[96/100][207/391] Loss_D: 3.1549 Loss_G: 2.6500 D(x): 0.5594 D(G(z)): 0.3792 / 0.4218\n",
"[96/100][208/391] Loss_D: 2.5328 Loss_G: 2.2663 D(x): 0.6783 D(G(z)): 0.3914 / 0.4995\n",
"[96/100][209/391] Loss_D: 2.4176 Loss_G: 2.1536 D(x): 0.7101 D(G(z)): 0.3755 / 0.5369\n",
"[96/100][210/391] Loss_D: 2.9729 Loss_G: 1.8674 D(x): 0.7243 D(G(z)): 0.5065 / 0.5717\n",
"[96/100][211/391] Loss_D: 3.5608 Loss_G: 2.4602 D(x): 0.6867 D(G(z)): 0.4587 / 0.4799\n",
"[96/100][212/391] Loss_D: 3.2369 Loss_G: 3.1928 D(x): 0.7237 D(G(z)): 0.5287 / 0.3928\n",
"[96/100][213/391] Loss_D: 2.9213 Loss_G: 2.7484 D(x): 0.6730 D(G(z)): 0.4707 / 0.4394\n",
"[96/100][214/391] Loss_D: 2.2714 Loss_G: 2.9621 D(x): 0.6619 D(G(z)): 0.3099 / 0.4025\n",
"[96/100][215/391] Loss_D: 2.5771 Loss_G: 3.2058 D(x): 0.7322 D(G(z)): 0.3817 / 0.3849\n",
"[96/100][216/391] Loss_D: 3.1079 Loss_G: 3.1697 D(x): 0.6017 D(G(z)): 0.4438 / 0.3863\n",
"[96/100][217/391] Loss_D: 2.5997 Loss_G: 2.8440 D(x): 0.6766 D(G(z)): 0.3681 / 0.4148\n",
"[96/100][218/391] Loss_D: 2.3448 Loss_G: 3.1094 D(x): 0.7121 D(G(z)): 0.3649 / 0.3834\n",
"[96/100][219/391] Loss_D: 3.2059 Loss_G: 2.5410 D(x): 0.6952 D(G(z)): 0.5418 / 0.4611\n",
"[96/100][220/391] Loss_D: 3.0083 Loss_G: 3.1569 D(x): 0.7150 D(G(z)): 0.5088 / 0.3838\n",
"[96/100][221/391] Loss_D: 1.9894 Loss_G: 3.7573 D(x): 0.8044 D(G(z)): 0.2909 / 0.3253\n",
"[96/100][222/391] Loss_D: 2.4824 Loss_G: 3.9227 D(x): 0.7255 D(G(z)): 0.3994 / 0.3160\n",
"[96/100][223/391] Loss_D: 2.9840 Loss_G: 2.6001 D(x): 0.6837 D(G(z)): 0.4878 / 0.4689\n",
"[96/100][224/391] Loss_D: 2.3945 Loss_G: 3.0673 D(x): 0.6717 D(G(z)): 0.3323 / 0.4074\n",
"[96/100][225/391] Loss_D: 3.7851 Loss_G: 3.6141 D(x): 0.5543 D(G(z)): 0.5230 / 0.3370\n",
"[96/100][226/391] Loss_D: 2.8362 Loss_G: 3.3941 D(x): 0.6875 D(G(z)): 0.4785 / 0.3595\n",
"[96/100][227/391] Loss_D: 2.6338 Loss_G: 3.2803 D(x): 0.6650 D(G(z)): 0.3617 / 0.3564\n",
"[96/100][228/391] Loss_D: 3.8340 Loss_G: 3.2616 D(x): 0.4794 D(G(z)): 0.4200 / 0.3677\n",
"[96/100][229/391] Loss_D: 2.8929 Loss_G: 2.7228 D(x): 0.6154 D(G(z)): 0.4013 / 0.4404\n",
"[96/100][230/391] Loss_D: 2.8610 Loss_G: 3.2661 D(x): 0.6439 D(G(z)): 0.4811 / 0.3778\n",
"[96/100][231/391] Loss_D: 3.6485 Loss_G: 4.1322 D(x): 0.7640 D(G(z)): 0.6159 / 0.2912\n",
"[96/100][232/391] Loss_D: 2.8772 Loss_G: 2.0037 D(x): 0.6186 D(G(z)): 0.3823 / 0.5505\n",
"[96/100][233/391] Loss_D: 3.2271 Loss_G: 3.1973 D(x): 0.5171 D(G(z)): 0.3285 / 0.3779\n",
"[96/100][234/391] Loss_D: 2.8115 Loss_G: 2.7091 D(x): 0.6318 D(G(z)): 0.4153 / 0.4426\n",
"[96/100][235/391] Loss_D: 2.6589 Loss_G: 2.1747 D(x): 0.6423 D(G(z)): 0.3861 / 0.5157\n",
"[96/100][236/391] Loss_D: 3.2817 Loss_G: 2.0416 D(x): 0.6956 D(G(z)): 0.5578 / 0.5287\n",
"[96/100][237/391] Loss_D: 2.5440 Loss_G: 3.0468 D(x): 0.7469 D(G(z)): 0.3760 / 0.3932\n",
"[96/100][238/391] Loss_D: 2.6540 Loss_G: 2.9021 D(x): 0.6934 D(G(z)): 0.4596 / 0.4190\n",
"[96/100][239/391] Loss_D: 2.6968 Loss_G: 4.1132 D(x): 0.7008 D(G(z)): 0.4481 / 0.2961\n",
"[96/100][240/391] Loss_D: 2.5873 Loss_G: 3.7177 D(x): 0.6529 D(G(z)): 0.3268 / 0.3349\n",
"[96/100][241/391] Loss_D: 3.9497 Loss_G: 2.4932 D(x): 0.7135 D(G(z)): 0.5157 / 0.4802\n",
"[96/100][242/391] Loss_D: 2.7378 Loss_G: 2.4552 D(x): 0.7177 D(G(z)): 0.4599 / 0.4812\n",
"[96/100][243/391] Loss_D: 2.4646 Loss_G: 3.3918 D(x): 0.7534 D(G(z)): 0.3947 / 0.3754\n",
"[96/100][244/391] Loss_D: 2.6506 Loss_G: 2.6525 D(x): 0.6363 D(G(z)): 0.3659 / 0.4585\n",
"[96/100][245/391] Loss_D: 2.7687 Loss_G: 3.6679 D(x): 0.7071 D(G(z)): 0.4499 / 0.3433\n",
"[96/100][246/391] Loss_D: 2.9367 Loss_G: 3.1492 D(x): 0.6776 D(G(z)): 0.4405 / 0.3867\n",
"[96/100][247/391] Loss_D: 2.6420 Loss_G: 2.8695 D(x): 0.7115 D(G(z)): 0.4082 / 0.4084\n",
"[96/100][248/391] Loss_D: 3.0168 Loss_G: 2.4661 D(x): 0.5872 D(G(z)): 0.3575 / 0.4875\n",
"[96/100][249/391] Loss_D: 2.5269 Loss_G: 3.5831 D(x): 0.6596 D(G(z)): 0.3036 / 0.3428\n",
"[96/100][250/391] Loss_D: 2.5033 Loss_G: 2.0915 D(x): 0.7624 D(G(z)): 0.3667 / 0.5263\n",
"[96/100][251/391] Loss_D: 2.6352 Loss_G: 2.4782 D(x): 0.6732 D(G(z)): 0.3508 / 0.4847\n",
"[96/100][252/391] Loss_D: 3.2685 Loss_G: 3.1675 D(x): 0.5126 D(G(z)): 0.3192 / 0.3776\n",
"[96/100][253/391] Loss_D: 3.0432 Loss_G: 2.6658 D(x): 0.7288 D(G(z)): 0.5125 / 0.4587\n",
"[96/100][254/391] Loss_D: 3.0188 Loss_G: 3.0740 D(x): 0.6860 D(G(z)): 0.5371 / 0.3883\n",
"[96/100][255/391] Loss_D: 2.6734 Loss_G: 2.3748 D(x): 0.7040 D(G(z)): 0.3635 / 0.4816\n",
"[96/100][256/391] Loss_D: 2.5972 Loss_G: 3.0203 D(x): 0.7451 D(G(z)): 0.4359 / 0.4007\n",
"[96/100][257/391] Loss_D: 3.4662 Loss_G: 3.3076 D(x): 0.5921 D(G(z)): 0.4706 / 0.3705\n",
"[96/100][258/391] Loss_D: 2.5347 Loss_G: 3.3909 D(x): 0.7264 D(G(z)): 0.4192 / 0.3797\n",
"[96/100][259/391] Loss_D: 2.9681 Loss_G: 3.2699 D(x): 0.6726 D(G(z)): 0.4702 / 0.3651\n",
"[96/100][260/391] Loss_D: 2.5197 Loss_G: 3.4156 D(x): 0.6890 D(G(z)): 0.3640 / 0.3686\n",
"[96/100][261/391] Loss_D: 2.8707 Loss_G: 2.1397 D(x): 0.6623 D(G(z)): 0.4080 / 0.5168\n",
"[96/100][262/391] Loss_D: 2.7100 Loss_G: 2.7851 D(x): 0.6981 D(G(z)): 0.4073 / 0.4333\n",
"[96/100][263/391] Loss_D: 3.2434 Loss_G: 2.5226 D(x): 0.6122 D(G(z)): 0.4030 / 0.4642\n",
"[96/100][264/391] Loss_D: 2.5869 Loss_G: 3.0155 D(x): 0.6253 D(G(z)): 0.3530 / 0.3938\n",
"[96/100][265/391] Loss_D: 2.5728 Loss_G: 2.2714 D(x): 0.6449 D(G(z)): 0.2921 / 0.4954\n",
"[96/100][266/391] Loss_D: 3.2823 Loss_G: 2.8873 D(x): 0.7286 D(G(z)): 0.5445 / 0.4275\n",
"[96/100][267/391] Loss_D: 2.6652 Loss_G: 2.2459 D(x): 0.6952 D(G(z)): 0.4125 / 0.4899\n",
"[96/100][268/391] Loss_D: 2.6304 Loss_G: 2.0655 D(x): 0.7041 D(G(z)): 0.4660 / 0.5235\n",
"[96/100][269/391] Loss_D: 2.8952 Loss_G: 2.8655 D(x): 0.6560 D(G(z)): 0.4181 / 0.4057\n",
"[96/100][270/391] Loss_D: 2.4494 Loss_G: 2.5186 D(x): 0.6859 D(G(z)): 0.2862 / 0.4696\n",
"[96/100][271/391] Loss_D: 3.7515 Loss_G: 2.5398 D(x): 0.7181 D(G(z)): 0.5017 / 0.4712\n",
"[96/100][272/391] Loss_D: 2.5911 Loss_G: 2.7556 D(x): 0.6393 D(G(z)): 0.3034 / 0.4411\n",
"[96/100][273/391] Loss_D: 2.9204 Loss_G: 2.7959 D(x): 0.7004 D(G(z)): 0.4929 / 0.4326\n",
"[96/100][274/391] Loss_D: 2.4917 Loss_G: 2.7593 D(x): 0.7575 D(G(z)): 0.4025 / 0.4330\n",
"[96/100][275/391] Loss_D: 2.6605 Loss_G: 2.8783 D(x): 0.6692 D(G(z)): 0.3647 / 0.4262\n",
"[96/100][276/391] Loss_D: 2.5209 Loss_G: 3.6200 D(x): 0.7255 D(G(z)): 0.3731 / 0.3329\n",
"[96/100][277/391] Loss_D: 2.8021 Loss_G: 2.8513 D(x): 0.7339 D(G(z)): 0.4547 / 0.4185\n",
"[96/100][278/391] Loss_D: 2.4716 Loss_G: 3.8008 D(x): 0.7255 D(G(z)): 0.4352 / 0.3120\n",
"[96/100][279/391] Loss_D: 3.0001 Loss_G: 3.9186 D(x): 0.6666 D(G(z)): 0.4794 / 0.3083\n",
"[96/100][280/391] Loss_D: 3.3778 Loss_G: 4.4020 D(x): 0.5727 D(G(z)): 0.4540 / 0.2840\n",
"[96/100][281/391] Loss_D: 2.5184 Loss_G: 3.0024 D(x): 0.6770 D(G(z)): 0.3483 / 0.4041\n",
"[96/100][282/391] Loss_D: 2.3330 Loss_G: 2.8453 D(x): 0.7238 D(G(z)): 0.3281 / 0.4158\n",
"[96/100][283/391] Loss_D: 3.5396 Loss_G: 2.6128 D(x): 0.4997 D(G(z)): 0.4040 / 0.4327\n",
"[96/100][284/391] Loss_D: 2.5650 Loss_G: 3.1413 D(x): 0.6686 D(G(z)): 0.4155 / 0.3797\n",
"[96/100][285/391] Loss_D: 2.7788 Loss_G: 2.6934 D(x): 0.6597 D(G(z)): 0.3867 / 0.4365\n",
"[96/100][286/391] Loss_D: 3.0780 Loss_G: 3.0055 D(x): 0.7131 D(G(z)): 0.5256 / 0.3878\n",
"[96/100][287/391] Loss_D: 2.4344 Loss_G: 3.0904 D(x): 0.7545 D(G(z)): 0.3299 / 0.3977\n",
"[96/100][288/391] Loss_D: 2.8614 Loss_G: 2.1960 D(x): 0.6201 D(G(z)): 0.4259 / 0.5070\n",
"[96/100][289/391] Loss_D: 3.0857 Loss_G: 2.5170 D(x): 0.7028 D(G(z)): 0.4767 / 0.4695\n",
"[96/100][290/391] Loss_D: 3.6511 Loss_G: 2.8154 D(x): 0.5293 D(G(z)): 0.4909 / 0.4145\n",
"[96/100][291/391] Loss_D: 2.7401 Loss_G: 2.0156 D(x): 0.7666 D(G(z)): 0.4548 / 0.5492\n",
"[96/100][292/391] Loss_D: 2.7536 Loss_G: 2.5188 D(x): 0.7089 D(G(z)): 0.4135 / 0.4687\n",
"[96/100][293/391] Loss_D: 3.0892 Loss_G: 3.3560 D(x): 0.5785 D(G(z)): 0.4016 / 0.3536\n",
"[96/100][294/391] Loss_D: 2.6245 Loss_G: 2.8594 D(x): 0.7009 D(G(z)): 0.4470 / 0.4253\n",
"[96/100][295/391] Loss_D: 2.6860 Loss_G: 2.1867 D(x): 0.7067 D(G(z)): 0.3902 / 0.5037\n",
"[96/100][296/391] Loss_D: 3.0308 Loss_G: 2.6997 D(x): 0.6241 D(G(z)): 0.4522 / 0.4234\n",
"[96/100][297/391] Loss_D: 2.9209 Loss_G: 2.5035 D(x): 0.6550 D(G(z)): 0.4044 / 0.4548\n",
"[96/100][298/391] Loss_D: 2.4837 Loss_G: 3.1364 D(x): 0.7458 D(G(z)): 0.4358 / 0.3844\n",
"[96/100][299/391] Loss_D: 2.5650 Loss_G: 3.2079 D(x): 0.7757 D(G(z)): 0.4305 / 0.3736\n",
"[96/100][300/391] Loss_D: 2.7495 Loss_G: 2.0117 D(x): 0.6550 D(G(z)): 0.3578 / 0.5501\n",
"[96/100][301/391] Loss_D: 3.5069 Loss_G: 3.5927 D(x): 0.6473 D(G(z)): 0.4457 / 0.3453\n",
"[96/100][302/391] Loss_D: 2.3808 Loss_G: 2.7427 D(x): 0.7168 D(G(z)): 0.3801 / 0.4270\n",
"[96/100][303/391] Loss_D: 2.5799 Loss_G: 3.1068 D(x): 0.7128 D(G(z)): 0.3968 / 0.3961\n",
"[96/100][304/391] Loss_D: 3.0899 Loss_G: 3.1768 D(x): 0.5870 D(G(z)): 0.4592 / 0.3811\n",
"[96/100][305/391] Loss_D: 2.7397 Loss_G: 2.6265 D(x): 0.6499 D(G(z)): 0.3706 / 0.4416\n",
"[96/100][306/391] Loss_D: 3.0603 Loss_G: 2.3313 D(x): 0.6485 D(G(z)): 0.4307 / 0.4851\n",
"[96/100][307/391] Loss_D: 2.4590 Loss_G: 2.1377 D(x): 0.6932 D(G(z)): 0.3298 / 0.5071\n",
"[96/100][308/391] Loss_D: 3.1187 Loss_G: 3.0387 D(x): 0.6137 D(G(z)): 0.4794 / 0.3967\n",
"[96/100][309/391] Loss_D: 3.3829 Loss_G: 2.7028 D(x): 0.5583 D(G(z)): 0.4810 / 0.4402\n",
"[96/100][310/391] Loss_D: 2.9174 Loss_G: 2.3727 D(x): 0.6450 D(G(z)): 0.3750 / 0.5014\n",
"[96/100][311/391] Loss_D: 2.5768 Loss_G: 2.4921 D(x): 0.7792 D(G(z)): 0.3928 / 0.4623\n",
"[96/100][312/391] Loss_D: 2.6652 Loss_G: 3.6497 D(x): 0.8345 D(G(z)): 0.4846 / 0.3272\n",
"[96/100][313/391] Loss_D: 2.5044 Loss_G: 2.7363 D(x): 0.6528 D(G(z)): 0.3257 / 0.4316\n",
"[96/100][314/391] Loss_D: 2.8117 Loss_G: 2.1184 D(x): 0.7091 D(G(z)): 0.4914 / 0.5068\n",
"[96/100][315/391] Loss_D: 2.7779 Loss_G: 3.1121 D(x): 0.6642 D(G(z)): 0.4239 / 0.4018\n",
"[96/100][316/391] Loss_D: 2.7350 Loss_G: 2.3230 D(x): 0.6725 D(G(z)): 0.3903 / 0.4621\n",
"[96/100][317/391] Loss_D: 2.6624 Loss_G: 2.2069 D(x): 0.7247 D(G(z)): 0.4049 / 0.5129\n",
"[96/100][318/391] Loss_D: 3.5690 Loss_G: 2.5282 D(x): 0.5631 D(G(z)): 0.5118 / 0.4588\n",
"[96/100][319/391] Loss_D: 2.3514 Loss_G: 2.5449 D(x): 0.7239 D(G(z)): 0.3629 / 0.4683\n",
"[96/100][320/391] Loss_D: 2.4583 Loss_G: 2.8023 D(x): 0.7557 D(G(z)): 0.3519 / 0.4235\n",
"[96/100][321/391] Loss_D: 2.8194 Loss_G: 2.8298 D(x): 0.7008 D(G(z)): 0.4114 / 0.4194\n",
"[96/100][322/391] Loss_D: 2.7991 Loss_G: 2.7732 D(x): 0.7510 D(G(z)): 0.5085 / 0.4367\n",
"[96/100][323/391] Loss_D: 2.9858 Loss_G: 3.3859 D(x): 0.6341 D(G(z)): 0.4010 / 0.3504\n",
"[96/100][324/391] Loss_D: 2.5606 Loss_G: 2.8816 D(x): 0.7106 D(G(z)): 0.4403 / 0.4135\n",
"[96/100][325/391] Loss_D: 2.8803 Loss_G: 3.3188 D(x): 0.6949 D(G(z)): 0.4301 / 0.3685\n",
"[96/100][326/391] Loss_D: 2.4645 Loss_G: 2.7113 D(x): 0.6630 D(G(z)): 0.3107 / 0.4317\n",
"[96/100][327/391] Loss_D: 2.9349 Loss_G: 2.6063 D(x): 0.5669 D(G(z)): 0.3298 / 0.4638\n",
"[96/100][328/391] Loss_D: 2.6489 Loss_G: 2.6250 D(x): 0.6615 D(G(z)): 0.3799 / 0.4589\n",
"[96/100][329/391] Loss_D: 2.7024 Loss_G: 3.6507 D(x): 0.6841 D(G(z)): 0.3869 / 0.3404\n",
"[96/100][330/391] Loss_D: 3.4211 Loss_G: 2.2175 D(x): 0.5855 D(G(z)): 0.4683 / 0.5014\n",
"[96/100][331/391] Loss_D: 3.4836 Loss_G: 1.7843 D(x): 0.6803 D(G(z)): 0.4970 / 0.5833\n",
"[96/100][332/391] Loss_D: 2.6219 Loss_G: 1.9796 D(x): 0.6867 D(G(z)): 0.4289 / 0.5549\n",
"[96/100][333/391] Loss_D: 3.3617 Loss_G: 2.7441 D(x): 0.6491 D(G(z)): 0.4956 / 0.4335\n",
"[96/100][334/391] Loss_D: 2.6168 Loss_G: 2.2435 D(x): 0.6623 D(G(z)): 0.4040 / 0.5185\n",
"[96/100][335/391] Loss_D: 2.6538 Loss_G: 3.6276 D(x): 0.8254 D(G(z)): 0.4746 / 0.3228\n",
"[96/100][336/391] Loss_D: 2.8358 Loss_G: 3.4083 D(x): 0.6626 D(G(z)): 0.3707 / 0.3478\n",
"[96/100][337/391] Loss_D: 2.7720 Loss_G: 2.5468 D(x): 0.6802 D(G(z)): 0.4013 / 0.4586\n",
"[96/100][338/391] Loss_D: 3.0179 Loss_G: 3.1461 D(x): 0.6165 D(G(z)): 0.4183 / 0.3820\n",
"[96/100][339/391] Loss_D: 3.2184 Loss_G: 2.3186 D(x): 0.6191 D(G(z)): 0.4876 / 0.5032\n",
"[96/100][340/391] Loss_D: 2.3191 Loss_G: 2.7918 D(x): 0.7242 D(G(z)): 0.3133 / 0.4324\n",
"[96/100][341/391] Loss_D: 2.6787 Loss_G: 2.6153 D(x): 0.6680 D(G(z)): 0.3855 / 0.4683\n",
"[96/100][342/391] Loss_D: 3.3216 Loss_G: 2.1983 D(x): 0.6190 D(G(z)): 0.5030 / 0.5033\n",
"[96/100][343/391] Loss_D: 3.2854 Loss_G: 2.9389 D(x): 0.6626 D(G(z)): 0.5111 / 0.4074\n",
"[96/100][344/391] Loss_D: 3.3747 Loss_G: 3.6416 D(x): 0.6074 D(G(z)): 0.5076 / 0.3332\n",
"[96/100][345/391] Loss_D: 3.0434 Loss_G: 3.6537 D(x): 0.7549 D(G(z)): 0.5236 / 0.3321\n",
"[96/100][346/391] Loss_D: 3.4320 Loss_G: 3.2091 D(x): 0.5447 D(G(z)): 0.4467 / 0.3717\n",
"[96/100][347/391] Loss_D: 2.9612 Loss_G: 2.7379 D(x): 0.6230 D(G(z)): 0.3462 / 0.4355\n",
"[96/100][348/391] Loss_D: 2.5230 Loss_G: 2.6988 D(x): 0.6648 D(G(z)): 0.3091 / 0.4458\n",
"[96/100][349/391] Loss_D: 2.9997 Loss_G: 2.2286 D(x): 0.6610 D(G(z)): 0.4881 / 0.4994\n",
"[96/100][350/391] Loss_D: 2.8033 Loss_G: 2.5513 D(x): 0.7517 D(G(z)): 0.4901 / 0.4622\n",
"[96/100][351/391] Loss_D: 2.6577 Loss_G: 2.7220 D(x): 0.6426 D(G(z)): 0.3360 / 0.4442\n",
"[96/100][352/391] Loss_D: 2.8661 Loss_G: 2.7932 D(x): 0.7255 D(G(z)): 0.5109 / 0.4357\n",
"[96/100][353/391] Loss_D: 2.6503 Loss_G: 2.3210 D(x): 0.6843 D(G(z)): 0.3747 / 0.4965\n",
"[96/100][354/391] Loss_D: 2.9037 Loss_G: 2.0917 D(x): 0.6555 D(G(z)): 0.4610 / 0.5332\n",
"[96/100][355/391] Loss_D: 2.5144 Loss_G: 3.7560 D(x): 0.7471 D(G(z)): 0.4285 / 0.3089\n",
"[96/100][356/391] Loss_D: 2.9313 Loss_G: 2.6926 D(x): 0.6180 D(G(z)): 0.3962 / 0.4337\n",
"[96/100][357/391] Loss_D: 2.2564 Loss_G: 2.9505 D(x): 0.7412 D(G(z)): 0.3042 / 0.4101\n",
"[96/100][358/391] Loss_D: 2.6753 Loss_G: 3.2301 D(x): 0.6680 D(G(z)): 0.4374 / 0.3741\n",
"[96/100][359/391] Loss_D: 2.7313 Loss_G: 3.1469 D(x): 0.6900 D(G(z)): 0.4531 / 0.3945\n",
"[96/100][360/391] Loss_D: 2.9946 Loss_G: 2.4951 D(x): 0.6373 D(G(z)): 0.4697 / 0.4680\n",
"[96/100][361/391] Loss_D: 3.4893 Loss_G: 2.3213 D(x): 0.6498 D(G(z)): 0.4324 / 0.4924\n",
"[96/100][362/391] Loss_D: 2.7745 Loss_G: 3.1450 D(x): 0.6709 D(G(z)): 0.3926 / 0.3773\n",
"[96/100][363/391] Loss_D: 2.6616 Loss_G: 3.4051 D(x): 0.6537 D(G(z)): 0.3532 / 0.3555\n",
"[96/100][364/391] Loss_D: 2.5460 Loss_G: 1.8313 D(x): 0.6879 D(G(z)): 0.3105 / 0.5693\n",
"[96/100][365/391] Loss_D: 2.7045 Loss_G: 2.1826 D(x): 0.7343 D(G(z)): 0.4442 / 0.5202\n",
"[96/100][366/391] Loss_D: 2.7071 Loss_G: 3.1769 D(x): 0.7028 D(G(z)): 0.4440 / 0.3878\n",
"[96/100][367/391] Loss_D: 2.9237 Loss_G: 2.8053 D(x): 0.6972 D(G(z)): 0.4590 / 0.4179\n",
"[96/100][368/391] Loss_D: 2.6619 Loss_G: 2.2145 D(x): 0.6508 D(G(z)): 0.3745 / 0.5317\n",
"[96/100][369/391] Loss_D: 2.5221 Loss_G: 3.5592 D(x): 0.6388 D(G(z)): 0.3005 / 0.3444\n",
"[96/100][370/391] Loss_D: 2.6691 Loss_G: 2.9589 D(x): 0.6409 D(G(z)): 0.3425 / 0.4081\n",
"[96/100][371/391] Loss_D: 2.9892 Loss_G: 3.0596 D(x): 0.7487 D(G(z)): 0.4916 / 0.4125\n",
"[96/100][372/391] Loss_D: 2.6651 Loss_G: 2.5857 D(x): 0.7025 D(G(z)): 0.4561 / 0.4616\n",
"[96/100][373/391] Loss_D: 2.9315 Loss_G: 2.7721 D(x): 0.6463 D(G(z)): 0.4570 / 0.4447\n",
"[96/100][374/391] Loss_D: 2.7895 Loss_G: 2.2913 D(x): 0.7133 D(G(z)): 0.4793 / 0.4966\n",
"[96/100][375/391] Loss_D: 3.9027 Loss_G: 3.3144 D(x): 0.6946 D(G(z)): 0.6431 / 0.3716\n",
"[96/100][376/391] Loss_D: 3.0573 Loss_G: 2.4281 D(x): 0.6345 D(G(z)): 0.4123 / 0.4830\n",
"[96/100][377/391] Loss_D: 2.5699 Loss_G: 2.9715 D(x): 0.6980 D(G(z)): 0.3338 / 0.3898\n",
"[96/100][378/391] Loss_D: 2.4585 Loss_G: 3.1099 D(x): 0.6630 D(G(z)): 0.3455 / 0.3959\n",
"[96/100][379/391] Loss_D: 2.5534 Loss_G: 2.8747 D(x): 0.6705 D(G(z)): 0.3648 / 0.4297\n",
"[96/100][380/391] Loss_D: 2.7040 Loss_G: 2.7523 D(x): 0.6239 D(G(z)): 0.2914 / 0.4337\n",
"[96/100][381/391] Loss_D: 2.5844 Loss_G: 3.5806 D(x): 0.7137 D(G(z)): 0.4010 / 0.3362\n",
"[96/100][382/391] Loss_D: 2.9411 Loss_G: 2.4511 D(x): 0.6650 D(G(z)): 0.4704 / 0.4855\n",
"[96/100][383/391] Loss_D: 2.9961 Loss_G: 3.5510 D(x): 0.7439 D(G(z)): 0.4869 / 0.3336\n",
"[96/100][384/391] Loss_D: 3.3755 Loss_G: 3.4504 D(x): 0.5886 D(G(z)): 0.5062 / 0.3489\n",
"[96/100][385/391] Loss_D: 2.4563 Loss_G: 2.5815 D(x): 0.6775 D(G(z)): 0.3312 / 0.4572\n",
"[96/100][386/391] Loss_D: 2.7716 Loss_G: 2.3302 D(x): 0.6860 D(G(z)): 0.4021 / 0.4848\n",
"[96/100][387/391] Loss_D: 2.8593 Loss_G: 3.1699 D(x): 0.6843 D(G(z)): 0.4559 / 0.3784\n",
"[96/100][388/391] Loss_D: 2.6565 Loss_G: 2.9164 D(x): 0.6461 D(G(z)): 0.3589 / 0.4127\n",
"[96/100][389/391] Loss_D: 2.6948 Loss_G: 2.1359 D(x): 0.6788 D(G(z)): 0.4074 / 0.5314\n",
"[96/100][390/391] Loss_D: 2.6910 Loss_G: 3.4074 D(x): 0.7269 D(G(z)): 0.4892 / 0.3704\n",
"[96/100][391/391] Loss_D: 3.7875 Loss_G: 2.1605 D(x): 0.6813 D(G(z)): 0.3320 / 0.5470\n",
"[97/100][1/391] Loss_D: 3.4891 Loss_G: 2.8315 D(x): 0.6691 D(G(z)): 0.4843 / 0.4201\n",
"[97/100][2/391] Loss_D: 2.2197 Loss_G: 2.8353 D(x): 0.6890 D(G(z)): 0.2943 / 0.4243\n",
"[97/100][3/391] Loss_D: 2.5214 Loss_G: 2.5806 D(x): 0.7557 D(G(z)): 0.4424 / 0.4628\n",
"[97/100][4/391] Loss_D: 2.4509 Loss_G: 3.7955 D(x): 0.6942 D(G(z)): 0.4006 / 0.3095\n",
"[97/100][5/391] Loss_D: 2.7852 Loss_G: 3.1511 D(x): 0.7646 D(G(z)): 0.5001 / 0.3935\n",
"[97/100][6/391] Loss_D: 2.5713 Loss_G: 3.0504 D(x): 0.7068 D(G(z)): 0.3334 / 0.3929\n",
"[97/100][7/391] Loss_D: 2.9555 Loss_G: 3.6184 D(x): 0.6574 D(G(z)): 0.4247 / 0.3317\n",
"[97/100][8/391] Loss_D: 2.6647 Loss_G: 2.6784 D(x): 0.6085 D(G(z)): 0.3281 / 0.4551\n",
"[97/100][9/391] Loss_D: 2.5082 Loss_G: 2.4854 D(x): 0.6941 D(G(z)): 0.3789 / 0.4731\n",
"[97/100][10/391] Loss_D: 2.5625 Loss_G: 2.2801 D(x): 0.7279 D(G(z)): 0.4030 / 0.5047\n",
"[97/100][11/391] Loss_D: 3.4706 Loss_G: 4.2451 D(x): 0.5993 D(G(z)): 0.4997 / 0.2855\n",
"[97/100][12/391] Loss_D: 2.7327 Loss_G: 2.5917 D(x): 0.6470 D(G(z)): 0.3996 / 0.4523\n",
"[97/100][13/391] Loss_D: 3.1777 Loss_G: 2.3498 D(x): 0.6962 D(G(z)): 0.5181 / 0.4772\n",
"[97/100][14/391] Loss_D: 2.6777 Loss_G: 2.1580 D(x): 0.7148 D(G(z)): 0.4552 / 0.5122\n",
"[97/100][15/391] Loss_D: 2.7295 Loss_G: 2.8201 D(x): 0.6289 D(G(z)): 0.4116 / 0.4241\n",
"[97/100][16/391] Loss_D: 3.0163 Loss_G: 3.5026 D(x): 0.6379 D(G(z)): 0.4012 / 0.3485\n",
"[97/100][17/391] Loss_D: 2.9315 Loss_G: 3.2002 D(x): 0.6214 D(G(z)): 0.4311 / 0.3707\n",
"[97/100][18/391] Loss_D: 2.6909 Loss_G: 2.3957 D(x): 0.6171 D(G(z)): 0.3273 / 0.4948\n",
"[97/100][19/391] Loss_D: 2.4832 Loss_G: 2.9403 D(x): 0.6948 D(G(z)): 0.3468 / 0.4053\n",
"[97/100][20/391] Loss_D: 2.5228 Loss_G: 2.6927 D(x): 0.7625 D(G(z)): 0.4454 / 0.4445\n",
"[97/100][21/391] Loss_D: 2.9539 Loss_G: 2.2191 D(x): 0.7667 D(G(z)): 0.5660 / 0.5080\n",
"[97/100][22/391] Loss_D: 3.0775 Loss_G: 3.0175 D(x): 0.6543 D(G(z)): 0.4430 / 0.4080\n",
"[97/100][23/391] Loss_D: 2.3823 Loss_G: 3.7178 D(x): 0.7657 D(G(z)): 0.3782 / 0.3417\n",
"[97/100][24/391] Loss_D: 2.5556 Loss_G: 3.6205 D(x): 0.7111 D(G(z)): 0.3839 / 0.3369\n",
"[97/100][25/391] Loss_D: 3.2769 Loss_G: 3.4686 D(x): 0.6093 D(G(z)): 0.4904 / 0.3447\n",
"[97/100][26/391] Loss_D: 2.4390 Loss_G: 3.4831 D(x): 0.6704 D(G(z)): 0.2969 / 0.3376\n",
"[97/100][27/391] Loss_D: 3.2363 Loss_G: 3.0735 D(x): 0.5578 D(G(z)): 0.3545 / 0.3703\n",
"[97/100][28/391] Loss_D: 2.2263 Loss_G: 2.9060 D(x): 0.7535 D(G(z)): 0.3620 / 0.4154\n",
"[97/100][29/391] Loss_D: 2.6017 Loss_G: 3.9758 D(x): 0.7247 D(G(z)): 0.4172 / 0.3059\n",
"[97/100][30/391] Loss_D: 2.7837 Loss_G: 2.6783 D(x): 0.6766 D(G(z)): 0.4936 / 0.4435\n",
"[97/100][31/391] Loss_D: 3.6416 Loss_G: 2.9946 D(x): 0.6260 D(G(z)): 0.3777 / 0.4156\n",
"[97/100][32/391] Loss_D: 2.5929 Loss_G: 3.0076 D(x): 0.7108 D(G(z)): 0.3893 / 0.3917\n",
"[97/100][33/391] Loss_D: 3.3202 Loss_G: 3.0829 D(x): 0.5992 D(G(z)): 0.4798 / 0.3932\n",
"[97/100][34/391] Loss_D: 3.0549 Loss_G: 3.7140 D(x): 0.6888 D(G(z)): 0.5209 / 0.3376\n",
"[97/100][35/391] Loss_D: 2.7680 Loss_G: 3.3193 D(x): 0.7042 D(G(z)): 0.4621 / 0.3667\n",
"[97/100][36/391] Loss_D: 3.4676 Loss_G: 3.6234 D(x): 0.5191 D(G(z)): 0.3903 / 0.3192\n",
"[97/100][37/391] Loss_D: 2.9659 Loss_G: 3.0016 D(x): 0.6153 D(G(z)): 0.3645 / 0.3974\n",
"[97/100][38/391] Loss_D: 2.4772 Loss_G: 3.3388 D(x): 0.6717 D(G(z)): 0.3601 / 0.3588\n",
"[97/100][39/391] Loss_D: 2.5150 Loss_G: 2.3454 D(x): 0.7380 D(G(z)): 0.3615 / 0.4837\n",
"[97/100][40/391] Loss_D: 2.7985 Loss_G: 2.7884 D(x): 0.7435 D(G(z)): 0.4449 / 0.4432\n",
"[97/100][41/391] Loss_D: 3.2763 Loss_G: 2.9073 D(x): 0.6728 D(G(z)): 0.5170 / 0.4037\n",
"[97/100][42/391] Loss_D: 2.5611 Loss_G: 2.8366 D(x): 0.7150 D(G(z)): 0.4038 / 0.4183\n",
"[97/100][43/391] Loss_D: 2.9755 Loss_G: 4.9817 D(x): 0.6550 D(G(z)): 0.4114 / 0.2319\n",
"[97/100][44/391] Loss_D: 2.7508 Loss_G: 2.6595 D(x): 0.6792 D(G(z)): 0.4635 / 0.4344\n",
"[97/100][45/391] Loss_D: 2.8496 Loss_G: 3.2659 D(x): 0.7084 D(G(z)): 0.4468 / 0.3675\n",
"[97/100][46/391] Loss_D: 3.1193 Loss_G: 3.7879 D(x): 0.6069 D(G(z)): 0.4101 / 0.2923\n",
"[97/100][47/391] Loss_D: 3.2583 Loss_G: 3.2864 D(x): 0.6000 D(G(z)): 0.4649 / 0.3667\n",
"[97/100][48/391] Loss_D: 2.4655 Loss_G: 2.6045 D(x): 0.7045 D(G(z)): 0.3997 / 0.4373\n",
"[97/100][49/391] Loss_D: 2.9872 Loss_G: 2.6622 D(x): 0.6113 D(G(z)): 0.4292 / 0.4543\n",
"[97/100][50/391] Loss_D: 2.5896 Loss_G: 1.9283 D(x): 0.7348 D(G(z)): 0.4285 / 0.5439\n",
"[97/100][51/391] Loss_D: 2.7279 Loss_G: 2.7704 D(x): 0.6751 D(G(z)): 0.4011 / 0.4266\n",
"[97/100][52/391] Loss_D: 2.7030 Loss_G: 3.3873 D(x): 0.6617 D(G(z)): 0.4018 / 0.3603\n",
"[97/100][53/391] Loss_D: 2.9925 Loss_G: 2.9809 D(x): 0.6927 D(G(z)): 0.5178 / 0.4071\n",
"[97/100][54/391] Loss_D: 2.3446 Loss_G: 2.6151 D(x): 0.7316 D(G(z)): 0.3536 / 0.4501\n",
"[97/100][55/391] Loss_D: 2.5562 Loss_G: 3.4751 D(x): 0.6900 D(G(z)): 0.3810 / 0.3453\n",
"[97/100][56/391] Loss_D: 3.0552 Loss_G: 2.4798 D(x): 0.5326 D(G(z)): 0.2635 / 0.4629\n",
"[97/100][57/391] Loss_D: 3.4627 Loss_G: 3.2171 D(x): 0.5999 D(G(z)): 0.5121 / 0.3742\n",
"[97/100][58/391] Loss_D: 3.7911 Loss_G: 2.5587 D(x): 0.4814 D(G(z)): 0.4145 / 0.4576\n",
"[97/100][59/391] Loss_D: 2.7643 Loss_G: 2.0853 D(x): 0.6307 D(G(z)): 0.3664 / 0.5290\n",
"[97/100][60/391] Loss_D: 2.9065 Loss_G: 2.2419 D(x): 0.7807 D(G(z)): 0.4957 / 0.5005\n",
"[97/100][61/391] Loss_D: 3.8609 Loss_G: 3.4602 D(x): 0.6258 D(G(z)): 0.6007 / 0.3604\n",
"[97/100][62/391] Loss_D: 2.7659 Loss_G: 2.8403 D(x): 0.7779 D(G(z)): 0.4940 / 0.4236\n",
"[97/100][63/391] Loss_D: 3.3208 Loss_G: 3.3579 D(x): 0.5556 D(G(z)): 0.4434 / 0.3605\n",
"[97/100][64/391] Loss_D: 3.2695 Loss_G: 3.2290 D(x): 0.6729 D(G(z)): 0.5698 / 0.3803\n",
"[97/100][65/391] Loss_D: 3.3971 Loss_G: 3.3106 D(x): 0.5541 D(G(z)): 0.4135 / 0.3800\n",
"[97/100][66/391] Loss_D: 3.0463 Loss_G: 2.8581 D(x): 0.5692 D(G(z)): 0.4225 / 0.4000\n",
"[97/100][67/391] Loss_D: 3.7001 Loss_G: 2.6995 D(x): 0.5128 D(G(z)): 0.3979 / 0.4304\n",
"[97/100][68/391] Loss_D: 2.6877 Loss_G: 3.5553 D(x): 0.6881 D(G(z)): 0.4732 / 0.3423\n",
"[97/100][69/391] Loss_D: 3.1033 Loss_G: 2.5627 D(x): 0.6072 D(G(z)): 0.4181 / 0.4434\n",
"[97/100][70/391] Loss_D: 2.8504 Loss_G: 3.2740 D(x): 0.7467 D(G(z)): 0.4466 / 0.3611\n",
"[97/100][71/391] Loss_D: 2.5451 Loss_G: 2.7024 D(x): 0.7109 D(G(z)): 0.3694 / 0.4488\n",
"[97/100][72/391] Loss_D: 2.7440 Loss_G: 3.1150 D(x): 0.7244 D(G(z)): 0.4866 / 0.3924\n",
"[97/100][73/391] Loss_D: 3.2436 Loss_G: 2.5166 D(x): 0.7037 D(G(z)): 0.5410 / 0.4556\n",
"[97/100][74/391] Loss_D: 2.8565 Loss_G: 2.9110 D(x): 0.6535 D(G(z)): 0.4393 / 0.4047\n",
"[97/100][75/391] Loss_D: 2.7189 Loss_G: 3.7887 D(x): 0.6793 D(G(z)): 0.4236 / 0.3160\n",
"[97/100][76/391] Loss_D: 3.1325 Loss_G: 3.5541 D(x): 0.5356 D(G(z)): 0.3638 / 0.3436\n",
"[97/100][77/391] Loss_D: 2.5609 Loss_G: 3.1564 D(x): 0.6910 D(G(z)): 0.2594 / 0.3836\n",
"[97/100][78/391] Loss_D: 2.4097 Loss_G: 2.8621 D(x): 0.7029 D(G(z)): 0.3389 / 0.4156\n",
"[97/100][79/391] Loss_D: 2.4587 Loss_G: 3.0300 D(x): 0.7527 D(G(z)): 0.4644 / 0.4135\n",
"[97/100][80/391] Loss_D: 2.8878 Loss_G: 2.7217 D(x): 0.7293 D(G(z)): 0.4406 / 0.4449\n",
"[97/100][81/391] Loss_D: 3.6350 Loss_G: 2.7688 D(x): 0.7122 D(G(z)): 0.6167 / 0.4329\n",
"[97/100][82/391] Loss_D: 2.8919 Loss_G: 2.6531 D(x): 0.7314 D(G(z)): 0.4590 / 0.4558\n",
"[97/100][83/391] Loss_D: 2.7480 Loss_G: 3.5245 D(x): 0.6697 D(G(z)): 0.3593 / 0.3398\n",
"[97/100][84/391] Loss_D: 2.6210 Loss_G: 2.8966 D(x): 0.6421 D(G(z)): 0.3696 / 0.4140\n",
"[97/100][85/391] Loss_D: 3.0282 Loss_G: 2.6067 D(x): 0.6383 D(G(z)): 0.4434 / 0.4537\n",
"[97/100][86/391] Loss_D: 2.5253 Loss_G: 3.1475 D(x): 0.6624 D(G(z)): 0.3108 / 0.3864\n",
"[97/100][87/391] Loss_D: 3.1637 Loss_G: 2.4972 D(x): 0.7018 D(G(z)): 0.4679 / 0.4593\n",
"[97/100][88/391] Loss_D: 2.7418 Loss_G: 2.0651 D(x): 0.6094 D(G(z)): 0.3351 / 0.5186\n",
"[97/100][89/391] Loss_D: 3.0790 Loss_G: 2.4086 D(x): 0.5642 D(G(z)): 0.3709 / 0.4789\n",
"[97/100][90/391] Loss_D: 2.4391 Loss_G: 2.7193 D(x): 0.7241 D(G(z)): 0.3602 / 0.4295\n",
"[97/100][91/391] Loss_D: 3.7098 Loss_G: 2.8774 D(x): 0.7628 D(G(z)): 0.5215 / 0.4255\n",
"[97/100][92/391] Loss_D: 3.0260 Loss_G: 2.6771 D(x): 0.5964 D(G(z)): 0.4392 / 0.4433\n",
"[97/100][93/391] Loss_D: 3.1394 Loss_G: 3.3712 D(x): 0.7273 D(G(z)): 0.5510 / 0.3721\n",
"[97/100][94/391] Loss_D: 2.9413 Loss_G: 4.0655 D(x): 0.6918 D(G(z)): 0.5263 / 0.2930\n",
"[97/100][95/391] Loss_D: 2.5246 Loss_G: 3.5090 D(x): 0.7601 D(G(z)): 0.3642 / 0.3433\n",
"[97/100][96/391] Loss_D: 2.6810 Loss_G: 2.3803 D(x): 0.6830 D(G(z)): 0.4091 / 0.5022\n",
"[97/100][97/391] Loss_D: 2.8264 Loss_G: 3.4386 D(x): 0.6261 D(G(z)): 0.3224 / 0.3446\n",
"[97/100][98/391] Loss_D: 3.8212 Loss_G: 3.1982 D(x): 0.4503 D(G(z)): 0.3122 / 0.3830\n",
"[97/100][99/391] Loss_D: 3.2131 Loss_G: 2.7025 D(x): 0.6388 D(G(z)): 0.4442 / 0.4480\n",
"[97/100][100/391] Loss_D: 2.8740 Loss_G: 2.6264 D(x): 0.7200 D(G(z)): 0.4797 / 0.4516\n",
"[97/100][101/391] Loss_D: 3.1283 Loss_G: 2.5833 D(x): 0.7778 D(G(z)): 0.5586 / 0.4570\n",
"[97/100][102/391] Loss_D: 2.7778 Loss_G: 2.9554 D(x): 0.7371 D(G(z)): 0.4395 / 0.4224\n",
"[97/100][103/391] Loss_D: 3.0098 Loss_G: 2.8539 D(x): 0.5739 D(G(z)): 0.3498 / 0.4224\n",
"[97/100][104/391] Loss_D: 2.9384 Loss_G: 2.8223 D(x): 0.6356 D(G(z)): 0.4512 / 0.4420\n",
"[97/100][105/391] Loss_D: 2.9652 Loss_G: 2.6036 D(x): 0.6298 D(G(z)): 0.3647 / 0.4259\n",
"[97/100][106/391] Loss_D: 2.7901 Loss_G: 2.4297 D(x): 0.6746 D(G(z)): 0.4407 / 0.4805\n",
"[97/100][107/391] Loss_D: 3.2763 Loss_G: 2.7728 D(x): 0.6116 D(G(z)): 0.4919 / 0.4190\n",
"[97/100][108/391] Loss_D: 3.2057 Loss_G: 2.8969 D(x): 0.6261 D(G(z)): 0.4760 / 0.4140\n",
"[97/100][109/391] Loss_D: 2.1425 Loss_G: 2.2844 D(x): 0.7545 D(G(z)): 0.2753 / 0.4971\n",
"[97/100][110/391] Loss_D: 3.4834 Loss_G: 2.6916 D(x): 0.5980 D(G(z)): 0.5209 / 0.4588\n",
"[97/100][111/391] Loss_D: 2.6700 Loss_G: 2.4426 D(x): 0.7063 D(G(z)): 0.4454 / 0.4731\n",
"[97/100][112/391] Loss_D: 2.5818 Loss_G: 2.1901 D(x): 0.6701 D(G(z)): 0.4046 / 0.5103\n",
"[97/100][113/391] Loss_D: 3.1200 Loss_G: 3.6219 D(x): 0.6853 D(G(z)): 0.4887 / 0.3269\n",
"[97/100][114/391] Loss_D: 3.0817 Loss_G: 1.7531 D(x): 0.6602 D(G(z)): 0.5065 / 0.5956\n",
"[97/100][115/391] Loss_D: 2.9744 Loss_G: 2.4392 D(x): 0.5877 D(G(z)): 0.3240 / 0.4620\n",
"[97/100][116/391] Loss_D: 2.3971 Loss_G: 3.7602 D(x): 0.6840 D(G(z)): 0.3086 / 0.3192\n",
"[97/100][117/391] Loss_D: 3.2513 Loss_G: 2.2193 D(x): 0.6735 D(G(z)): 0.4801 / 0.4984\n",
"[97/100][118/391] Loss_D: 2.6224 Loss_G: 3.3900 D(x): 0.6835 D(G(z)): 0.4468 / 0.3665\n",
"[97/100][119/391] Loss_D: 2.8805 Loss_G: 3.1028 D(x): 0.7991 D(G(z)): 0.5284 / 0.3850\n",
"[97/100][120/391] Loss_D: 2.9827 Loss_G: 3.2289 D(x): 0.7397 D(G(z)): 0.5431 / 0.3856\n",
"[97/100][121/391] Loss_D: 3.5301 Loss_G: 2.9226 D(x): 0.6808 D(G(z)): 0.4198 / 0.4239\n",
"[97/100][122/391] Loss_D: 2.9317 Loss_G: 2.7248 D(x): 0.5893 D(G(z)): 0.3100 / 0.4261\n",
"[97/100][123/391] Loss_D: 2.7829 Loss_G: 2.9558 D(x): 0.7061 D(G(z)): 0.4388 / 0.4210\n",
"[97/100][124/391] Loss_D: 2.7671 Loss_G: 2.9433 D(x): 0.6404 D(G(z)): 0.4020 / 0.4117\n",
"[97/100][125/391] Loss_D: 3.1728 Loss_G: 3.2391 D(x): 0.6916 D(G(z)): 0.5146 / 0.3736\n",
"[97/100][126/391] Loss_D: 2.6954 Loss_G: 3.6074 D(x): 0.6537 D(G(z)): 0.3891 / 0.3340\n",
"[97/100][127/391] Loss_D: 3.4340 Loss_G: 2.5653 D(x): 0.5476 D(G(z)): 0.4379 / 0.4566\n",
"[97/100][128/391] Loss_D: 2.4290 Loss_G: 3.1767 D(x): 0.7143 D(G(z)): 0.4406 / 0.3807\n",
"[97/100][129/391] Loss_D: 2.5875 Loss_G: 2.4178 D(x): 0.6730 D(G(z)): 0.3423 / 0.5004\n",
"[97/100][130/391] Loss_D: 2.4094 Loss_G: 3.1118 D(x): 0.7297 D(G(z)): 0.3412 / 0.4019\n",
"[97/100][131/391] Loss_D: 2.3117 Loss_G: 1.9655 D(x): 0.7265 D(G(z)): 0.3502 / 0.5559\n",
"[97/100][132/391] Loss_D: 3.0135 Loss_G: 3.5340 D(x): 0.6403 D(G(z)): 0.4608 / 0.3480\n",
"[97/100][133/391] Loss_D: 2.9475 Loss_G: 2.2667 D(x): 0.6400 D(G(z)): 0.4434 / 0.5058\n",
"[97/100][134/391] Loss_D: 3.3307 Loss_G: 2.9470 D(x): 0.6748 D(G(z)): 0.5592 / 0.4079\n",
"[97/100][135/391] Loss_D: 3.0764 Loss_G: 3.2706 D(x): 0.6757 D(G(z)): 0.5004 / 0.3604\n",
"[97/100][136/391] Loss_D: 3.2818 Loss_G: 3.9040 D(x): 0.5825 D(G(z)): 0.4391 / 0.3025\n",
"[97/100][137/391] Loss_D: 2.8360 Loss_G: 3.3505 D(x): 0.6346 D(G(z)): 0.3784 / 0.3585\n",
"[97/100][138/391] Loss_D: 2.6591 Loss_G: 2.8808 D(x): 0.6355 D(G(z)): 0.3520 / 0.4240\n",
"[97/100][139/391] Loss_D: 3.1895 Loss_G: 2.8254 D(x): 0.5868 D(G(z)): 0.3752 / 0.4350\n",
"[97/100][140/391] Loss_D: 2.1508 Loss_G: 3.4455 D(x): 0.7949 D(G(z)): 0.3710 / 0.3572\n",
"[97/100][141/391] Loss_D: 2.6614 Loss_G: 2.7914 D(x): 0.7740 D(G(z)): 0.4557 / 0.4219\n",
"[97/100][142/391] Loss_D: 2.7445 Loss_G: 2.5749 D(x): 0.6550 D(G(z)): 0.3811 / 0.4650\n",
"[97/100][143/391] Loss_D: 2.2873 Loss_G: 2.1999 D(x): 0.7911 D(G(z)): 0.2957 / 0.5002\n",
"[97/100][144/391] Loss_D: 2.5071 Loss_G: 2.8125 D(x): 0.6950 D(G(z)): 0.3888 / 0.4149\n",
"[97/100][145/391] Loss_D: 2.5746 Loss_G: 2.7775 D(x): 0.7310 D(G(z)): 0.3224 / 0.4182\n",
"[97/100][146/391] Loss_D: 2.7265 Loss_G: 3.1132 D(x): 0.7149 D(G(z)): 0.4186 / 0.3879\n",
"[97/100][147/391] Loss_D: 2.8323 Loss_G: 2.9353 D(x): 0.7543 D(G(z)): 0.4515 / 0.3967\n",
"[97/100][148/391] Loss_D: 2.9280 Loss_G: 2.4388 D(x): 0.6177 D(G(z)): 0.4504 / 0.4719\n",
"[97/100][149/391] Loss_D: 2.8781 Loss_G: 3.1831 D(x): 0.6353 D(G(z)): 0.3728 / 0.3782\n",
"[97/100][150/391] Loss_D: 3.6224 Loss_G: 2.5261 D(x): 0.6292 D(G(z)): 0.5718 / 0.4822\n",
"[97/100][151/391] Loss_D: 3.5759 Loss_G: 3.1014 D(x): 0.6201 D(G(z)): 0.4467 / 0.3872\n",
"[97/100][152/391] Loss_D: 2.9810 Loss_G: 2.6615 D(x): 0.6010 D(G(z)): 0.4453 / 0.4513\n",
"[97/100][153/391] Loss_D: 3.0186 Loss_G: 2.6549 D(x): 0.6013 D(G(z)): 0.4116 / 0.4398\n",
"[97/100][154/391] Loss_D: 2.4283 Loss_G: 3.3568 D(x): 0.7332 D(G(z)): 0.4283 / 0.3500\n",
"[97/100][155/391] Loss_D: 2.5166 Loss_G: 3.0649 D(x): 0.7922 D(G(z)): 0.3828 / 0.4007\n",
"[97/100][156/391] Loss_D: 2.5603 Loss_G: 2.6868 D(x): 0.6957 D(G(z)): 0.3602 / 0.4426\n",
"[97/100][157/391] Loss_D: 2.7355 Loss_G: 2.7962 D(x): 0.6430 D(G(z)): 0.3099 / 0.4027\n",
"[97/100][158/391] Loss_D: 2.5137 Loss_G: 2.5502 D(x): 0.7139 D(G(z)): 0.4080 / 0.4588\n",
"[97/100][159/391] Loss_D: 2.7979 Loss_G: 2.1940 D(x): 0.7044 D(G(z)): 0.4372 / 0.5080\n",
"[97/100][160/391] Loss_D: 2.9497 Loss_G: 2.7485 D(x): 0.6330 D(G(z)): 0.4454 / 0.4400\n",
"[97/100][161/391] Loss_D: 3.2981 Loss_G: 3.1730 D(x): 0.5985 D(G(z)): 0.4597 / 0.3956\n",
"[97/100][162/391] Loss_D: 2.4164 Loss_G: 2.4688 D(x): 0.6761 D(G(z)): 0.3329 / 0.4615\n",
"[97/100][163/391] Loss_D: 2.9721 Loss_G: 3.6238 D(x): 0.6685 D(G(z)): 0.4721 / 0.3268\n",
"[97/100][164/391] Loss_D: 2.9615 Loss_G: 3.5004 D(x): 0.7446 D(G(z)): 0.5482 / 0.3514\n",
"[97/100][165/391] Loss_D: 2.2438 Loss_G: 3.4339 D(x): 0.7696 D(G(z)): 0.3482 / 0.3589\n",
"[97/100][166/391] Loss_D: 2.4970 Loss_G: 3.5065 D(x): 0.7228 D(G(z)): 0.3927 / 0.3435\n",
"[97/100][167/391] Loss_D: 3.2992 Loss_G: 2.7092 D(x): 0.5567 D(G(z)): 0.4147 / 0.4370\n",
"[97/100][168/391] Loss_D: 2.8881 Loss_G: 3.2889 D(x): 0.6483 D(G(z)): 0.4441 / 0.3639\n",
"[97/100][169/391] Loss_D: 2.9585 Loss_G: 3.1244 D(x): 0.6343 D(G(z)): 0.4621 / 0.3816\n",
"[97/100][170/391] Loss_D: 3.1825 Loss_G: 3.7991 D(x): 0.5748 D(G(z)): 0.3857 / 0.3183\n",
"[97/100][171/391] Loss_D: 3.0671 Loss_G: 3.2827 D(x): 0.7388 D(G(z)): 0.5118 / 0.3676\n",
"[97/100][172/391] Loss_D: 2.8622 Loss_G: 3.3582 D(x): 0.7757 D(G(z)): 0.4981 / 0.3522\n",
"[97/100][173/391] Loss_D: 2.9935 Loss_G: 3.5336 D(x): 0.6914 D(G(z)): 0.4916 / 0.3417\n",
"[97/100][174/391] Loss_D: 2.7430 Loss_G: 4.9224 D(x): 0.5716 D(G(z)): 0.3052 / 0.2254\n",
"[97/100][175/391] Loss_D: 2.7324 Loss_G: 3.0087 D(x): 0.6120 D(G(z)): 0.3007 / 0.3987\n",
"[97/100][176/391] Loss_D: 2.6656 Loss_G: 2.7698 D(x): 0.7353 D(G(z)): 0.4063 / 0.4290\n",
"[97/100][177/391] Loss_D: 3.1138 Loss_G: 3.0052 D(x): 0.6177 D(G(z)): 0.4400 / 0.3997\n",
"[97/100][178/391] Loss_D: 3.1079 Loss_G: 3.3847 D(x): 0.6234 D(G(z)): 0.4658 / 0.3557\n",
"[97/100][179/391] Loss_D: 2.9814 Loss_G: 2.7432 D(x): 0.7143 D(G(z)): 0.4855 / 0.4415\n",
"[97/100][180/391] Loss_D: 3.1562 Loss_G: 2.7240 D(x): 0.7292 D(G(z)): 0.5316 / 0.4472\n",
"[97/100][181/391] Loss_D: 3.5883 Loss_G: 2.8031 D(x): 0.6457 D(G(z)): 0.4851 / 0.4200\n",
"[97/100][182/391] Loss_D: 2.8339 Loss_G: 3.3908 D(x): 0.6209 D(G(z)): 0.4097 / 0.3429\n",
"[97/100][183/391] Loss_D: 2.6870 Loss_G: 3.4931 D(x): 0.6318 D(G(z)): 0.3784 / 0.3477\n",
"[97/100][184/391] Loss_D: 2.7296 Loss_G: 3.2315 D(x): 0.6882 D(G(z)): 0.4247 / 0.3778\n",
"[97/100][185/391] Loss_D: 2.9462 Loss_G: 2.1876 D(x): 0.6164 D(G(z)): 0.3795 / 0.5114\n",
"[97/100][186/391] Loss_D: 2.3913 Loss_G: 2.9791 D(x): 0.7659 D(G(z)): 0.3791 / 0.3975\n",
"[97/100][187/391] Loss_D: 2.9776 Loss_G: 3.0874 D(x): 0.5633 D(G(z)): 0.3352 / 0.3687\n",
"[97/100][188/391] Loss_D: 2.1519 Loss_G: 3.4366 D(x): 0.7456 D(G(z)): 0.3110 / 0.3395\n",
"[97/100][189/391] Loss_D: 3.8034 Loss_G: 3.0886 D(x): 0.7018 D(G(z)): 0.6479 / 0.3935\n",
"[97/100][190/391] Loss_D: 2.6272 Loss_G: 3.5569 D(x): 0.7533 D(G(z)): 0.4280 / 0.3442\n",
"[97/100][191/391] Loss_D: 3.2093 Loss_G: 3.0174 D(x): 0.7296 D(G(z)): 0.5332 / 0.4066\n",
"[97/100][192/391] Loss_D: 2.5981 Loss_G: 3.3130 D(x): 0.6619 D(G(z)): 0.3941 / 0.3800\n",
"[97/100][193/391] Loss_D: 2.2671 Loss_G: 3.0761 D(x): 0.7564 D(G(z)): 0.3531 / 0.3837\n",
"[97/100][194/391] Loss_D: 2.6518 Loss_G: 3.8185 D(x): 0.6619 D(G(z)): 0.3961 / 0.3112\n",
"[97/100][195/391] Loss_D: 2.7323 Loss_G: 3.5106 D(x): 0.6362 D(G(z)): 0.3326 / 0.3400\n",
"[97/100][196/391] Loss_D: 2.5867 Loss_G: 3.1853 D(x): 0.6628 D(G(z)): 0.3741 / 0.3841\n",
"[97/100][197/391] Loss_D: 3.0048 Loss_G: 2.8999 D(x): 0.6890 D(G(z)): 0.4390 / 0.4134\n",
"[97/100][198/391] Loss_D: 3.7984 Loss_G: 2.4125 D(x): 0.5356 D(G(z)): 0.5002 / 0.4703\n",
"[97/100][199/391] Loss_D: 2.8324 Loss_G: 2.8226 D(x): 0.6224 D(G(z)): 0.3706 / 0.4374\n",
"[97/100][200/391] Loss_D: 3.5984 Loss_G: 1.5786 D(x): 0.5252 D(G(z)): 0.4726 / 0.6158\n",
"[97/100][201/391] Loss_D: 3.3510 Loss_G: 2.0606 D(x): 0.6902 D(G(z)): 0.5401 / 0.5255\n",
"[97/100][202/391] Loss_D: 2.7794 Loss_G: 3.0577 D(x): 0.6760 D(G(z)): 0.4411 / 0.3972\n",
"[97/100][203/391] Loss_D: 3.1203 Loss_G: 2.7286 D(x): 0.6021 D(G(z)): 0.4161 / 0.4267\n",
"[97/100][204/391] Loss_D: 3.0615 Loss_G: 3.9618 D(x): 0.6488 D(G(z)): 0.5174 / 0.3108\n",
"[97/100][205/391] Loss_D: 3.0539 Loss_G: 2.5295 D(x): 0.5565 D(G(z)): 0.4082 / 0.4529\n",
"[97/100][206/391] Loss_D: 2.8816 Loss_G: 3.6650 D(x): 0.6413 D(G(z)): 0.4201 / 0.3229\n",
"[97/100][207/391] Loss_D: 2.7967 Loss_G: 2.9392 D(x): 0.6315 D(G(z)): 0.3640 / 0.3969\n",
"[97/100][208/391] Loss_D: 2.2511 Loss_G: 2.2522 D(x): 0.7311 D(G(z)): 0.3833 / 0.4954\n",
"[97/100][209/391] Loss_D: 2.9932 Loss_G: 3.3093 D(x): 0.6903 D(G(z)): 0.4963 / 0.3832\n",
"[97/100][210/391] Loss_D: 2.8438 Loss_G: 2.4203 D(x): 0.5984 D(G(z)): 0.3205 / 0.4880\n",
"[97/100][211/391] Loss_D: 3.6555 Loss_G: 2.6478 D(x): 0.7212 D(G(z)): 0.5099 / 0.4702\n",
"[97/100][212/391] Loss_D: 3.6816 Loss_G: 2.6062 D(x): 0.6250 D(G(z)): 0.5570 / 0.4451\n",
"[97/100][213/391] Loss_D: 2.6134 Loss_G: 2.5594 D(x): 0.7109 D(G(z)): 0.3941 / 0.4619\n",
"[97/100][214/391] Loss_D: 3.5160 Loss_G: 2.5545 D(x): 0.6514 D(G(z)): 0.5830 / 0.4367\n",
"[97/100][215/391] Loss_D: 2.7050 Loss_G: 3.0378 D(x): 0.7063 D(G(z)): 0.3850 / 0.3880\n",
"[97/100][216/391] Loss_D: 2.6340 Loss_G: 2.5793 D(x): 0.7149 D(G(z)): 0.4057 / 0.4474\n",
"[97/100][217/391] Loss_D: 2.7571 Loss_G: 2.7920 D(x): 0.6578 D(G(z)): 0.4066 / 0.4264\n",
"[97/100][218/391] Loss_D: 2.7164 Loss_G: 3.1660 D(x): 0.6462 D(G(z)): 0.3443 / 0.3782\n",
"[97/100][219/391] Loss_D: 2.7843 Loss_G: 3.0993 D(x): 0.6279 D(G(z)): 0.3734 / 0.3987\n",
"[97/100][220/391] Loss_D: 2.7300 Loss_G: 2.8435 D(x): 0.7301 D(G(z)): 0.4742 / 0.4380\n",
"[97/100][221/391] Loss_D: 2.8074 Loss_G: 2.5626 D(x): 0.5754 D(G(z)): 0.3559 / 0.4633\n",
"[97/100][222/391] Loss_D: 2.9208 Loss_G: 3.1445 D(x): 0.6773 D(G(z)): 0.4695 / 0.3814\n",
"[97/100][223/391] Loss_D: 3.1535 Loss_G: 2.7785 D(x): 0.6691 D(G(z)): 0.5082 / 0.4252\n",
"[97/100][224/391] Loss_D: 2.6448 Loss_G: 2.6631 D(x): 0.6731 D(G(z)): 0.3988 / 0.4563\n",
"[97/100][225/391] Loss_D: 2.5262 Loss_G: 3.7051 D(x): 0.7213 D(G(z)): 0.3966 / 0.3216\n",
"[97/100][226/391] Loss_D: 3.1403 Loss_G: 3.1223 D(x): 0.6006 D(G(z)): 0.4634 / 0.3670\n",
"[97/100][227/391] Loss_D: 2.7557 Loss_G: 2.6153 D(x): 0.6816 D(G(z)): 0.4143 / 0.4589\n",
"[97/100][228/391] Loss_D: 2.4027 Loss_G: 3.4491 D(x): 0.7501 D(G(z)): 0.3723 / 0.3469\n",
"[97/100][229/391] Loss_D: 3.0314 Loss_G: 2.2967 D(x): 0.6134 D(G(z)): 0.4095 / 0.4975\n",
"[97/100][230/391] Loss_D: 2.3556 Loss_G: 3.2664 D(x): 0.6782 D(G(z)): 0.3568 / 0.3781\n",
"[97/100][231/391] Loss_D: 2.5729 Loss_G: 3.1569 D(x): 0.6678 D(G(z)): 0.3315 / 0.3713\n",
"[97/100][232/391] Loss_D: 2.4851 Loss_G: 1.8014 D(x): 0.7355 D(G(z)): 0.3832 / 0.5733\n",
"[97/100][233/391] Loss_D: 2.1002 Loss_G: 4.0253 D(x): 0.8078 D(G(z)): 0.3402 / 0.2986\n",
"[97/100][234/391] Loss_D: 2.5358 Loss_G: 3.1718 D(x): 0.6672 D(G(z)): 0.3971 / 0.3768\n",
"[97/100][235/391] Loss_D: 2.8189 Loss_G: 2.7683 D(x): 0.6936 D(G(z)): 0.4500 / 0.4319\n",
"[97/100][236/391] Loss_D: 2.7011 Loss_G: 4.0243 D(x): 0.6814 D(G(z)): 0.4182 / 0.3028\n",
"[97/100][237/391] Loss_D: 2.4816 Loss_G: 2.5576 D(x): 0.6827 D(G(z)): 0.3315 / 0.4677\n",
"[97/100][238/391] Loss_D: 2.6116 Loss_G: 3.0534 D(x): 0.6784 D(G(z)): 0.4059 / 0.3930\n",
"[97/100][239/391] Loss_D: 2.2031 Loss_G: 2.2462 D(x): 0.7585 D(G(z)): 0.3216 / 0.5105\n",
"[97/100][240/391] Loss_D: 3.1784 Loss_G: 2.4273 D(x): 0.6691 D(G(z)): 0.5172 / 0.4789\n",
"[97/100][241/391] Loss_D: 3.7749 Loss_G: 2.2187 D(x): 0.6832 D(G(z)): 0.3517 / 0.5124\n",
"[97/100][242/391] Loss_D: 2.3550 Loss_G: 2.7196 D(x): 0.7525 D(G(z)): 0.3916 / 0.4197\n",
"[97/100][243/391] Loss_D: 2.8613 Loss_G: 2.9666 D(x): 0.6361 D(G(z)): 0.3846 / 0.3934\n",
"[97/100][244/391] Loss_D: 2.5427 Loss_G: 3.0174 D(x): 0.7832 D(G(z)): 0.4844 / 0.3977\n",
"[97/100][245/391] Loss_D: 3.2515 Loss_G: 2.6742 D(x): 0.5654 D(G(z)): 0.4306 / 0.4387\n",
"[97/100][246/391] Loss_D: 2.5246 Loss_G: 2.9064 D(x): 0.6885 D(G(z)): 0.3268 / 0.4034\n",
"[97/100][247/391] Loss_D: 2.5816 Loss_G: 3.0645 D(x): 0.7539 D(G(z)): 0.4074 / 0.3876\n",
"[97/100][248/391] Loss_D: 2.9551 Loss_G: 3.0908 D(x): 0.6625 D(G(z)): 0.4819 / 0.3960\n",
"[97/100][249/391] Loss_D: 2.9806 Loss_G: 2.6349 D(x): 0.6954 D(G(z)): 0.5359 / 0.4595\n",
"[97/100][250/391] Loss_D: 2.8557 Loss_G: 3.7092 D(x): 0.6132 D(G(z)): 0.3287 / 0.3217\n",
"[97/100][251/391] Loss_D: 3.2643 Loss_G: 2.2577 D(x): 0.5774 D(G(z)): 0.4191 / 0.5117\n",
"[97/100][252/391] Loss_D: 2.0943 Loss_G: 4.2032 D(x): 0.8204 D(G(z)): 0.3035 / 0.2951\n",
"[97/100][253/391] Loss_D: 2.4297 Loss_G: 2.4426 D(x): 0.7596 D(G(z)): 0.3264 / 0.4743\n",
"[97/100][254/391] Loss_D: 3.2785 Loss_G: 3.0508 D(x): 0.6245 D(G(z)): 0.5064 / 0.3987\n",
"[97/100][255/391] Loss_D: 2.7824 Loss_G: 2.8301 D(x): 0.6289 D(G(z)): 0.2982 / 0.4171\n",
"[97/100][256/391] Loss_D: 2.9879 Loss_G: 2.3597 D(x): 0.6875 D(G(z)): 0.4648 / 0.4815\n",
"[97/100][257/391] Loss_D: 2.6951 Loss_G: 2.5887 D(x): 0.7693 D(G(z)): 0.3972 / 0.4409\n",
"[97/100][258/391] Loss_D: 2.5109 Loss_G: 2.7091 D(x): 0.6941 D(G(z)): 0.3606 / 0.4547\n",
"[97/100][259/391] Loss_D: 2.6527 Loss_G: 2.8861 D(x): 0.7007 D(G(z)): 0.4299 / 0.4201\n",
"[97/100][260/391] Loss_D: 2.6821 Loss_G: 3.6875 D(x): 0.7051 D(G(z)): 0.4202 / 0.3290\n",
"[97/100][261/391] Loss_D: 3.0834 Loss_G: 2.8068 D(x): 0.6208 D(G(z)): 0.4119 / 0.4427\n",
"[97/100][262/391] Loss_D: 2.8181 Loss_G: 2.5983 D(x): 0.7899 D(G(z)): 0.4929 / 0.4395\n",
"[97/100][263/391] Loss_D: 2.5473 Loss_G: 2.9650 D(x): 0.7132 D(G(z)): 0.3333 / 0.4034\n",
"[97/100][264/391] Loss_D: 2.0187 Loss_G: 2.1116 D(x): 0.7466 D(G(z)): 0.3276 / 0.5307\n",
"[97/100][265/391] Loss_D: 3.5146 Loss_G: 2.1994 D(x): 0.5208 D(G(z)): 0.4344 / 0.5312\n",
"[97/100][266/391] Loss_D: 3.1519 Loss_G: 3.2869 D(x): 0.7035 D(G(z)): 0.4710 / 0.3615\n",
"[97/100][267/391] Loss_D: 2.6141 Loss_G: 2.2184 D(x): 0.6572 D(G(z)): 0.3530 / 0.5145\n",
"[97/100][268/391] Loss_D: 2.6436 Loss_G: 2.9698 D(x): 0.6749 D(G(z)): 0.4835 / 0.4061\n",
"[97/100][269/391] Loss_D: 2.9564 Loss_G: 2.4671 D(x): 0.6224 D(G(z)): 0.4085 / 0.4911\n",
"[97/100][270/391] Loss_D: 3.1115 Loss_G: 2.6562 D(x): 0.6008 D(G(z)): 0.4214 / 0.4530\n",
"[97/100][271/391] Loss_D: 3.7213 Loss_G: 2.4958 D(x): 0.6652 D(G(z)): 0.4950 / 0.4749\n",
"[97/100][272/391] Loss_D: 2.7334 Loss_G: 2.5842 D(x): 0.6528 D(G(z)): 0.4243 / 0.4646\n",
"[97/100][273/391] Loss_D: 2.2479 Loss_G: 2.7764 D(x): 0.8672 D(G(z)): 0.4055 / 0.4224\n",
"[97/100][274/391] Loss_D: 2.9012 Loss_G: 2.8912 D(x): 0.6256 D(G(z)): 0.4106 / 0.4202\n",
"[97/100][275/391] Loss_D: 2.3723 Loss_G: 2.7511 D(x): 0.7673 D(G(z)): 0.3799 / 0.4305\n",
"[97/100][276/391] Loss_D: 2.9734 Loss_G: 3.2621 D(x): 0.6592 D(G(z)): 0.4440 / 0.3517\n",
"[97/100][277/391] Loss_D: 2.4828 Loss_G: 2.1409 D(x): 0.7485 D(G(z)): 0.3260 / 0.5252\n",
"[97/100][278/391] Loss_D: 3.0627 Loss_G: 3.0987 D(x): 0.6423 D(G(z)): 0.5020 / 0.3909\n",
"[97/100][279/391] Loss_D: 2.4596 Loss_G: 3.3350 D(x): 0.7226 D(G(z)): 0.3845 / 0.3733\n",
"[97/100][280/391] Loss_D: 2.5858 Loss_G: 2.4324 D(x): 0.6772 D(G(z)): 0.3633 / 0.5011\n",
"[97/100][281/391] Loss_D: 2.7102 Loss_G: 3.2897 D(x): 0.7164 D(G(z)): 0.4248 / 0.3700\n",
"[97/100][282/391] Loss_D: 3.0905 Loss_G: 2.7979 D(x): 0.6208 D(G(z)): 0.4089 / 0.4430\n",
"[97/100][283/391] Loss_D: 2.5634 Loss_G: 2.3851 D(x): 0.6817 D(G(z)): 0.3586 / 0.4948\n",
"[97/100][284/391] Loss_D: 2.7177 Loss_G: 2.5789 D(x): 0.6595 D(G(z)): 0.4172 / 0.4504\n",
"[97/100][285/391] Loss_D: 2.9738 Loss_G: 3.0049 D(x): 0.7075 D(G(z)): 0.5074 / 0.3932\n",
"[97/100][286/391] Loss_D: 2.7005 Loss_G: 3.5719 D(x): 0.6610 D(G(z)): 0.3847 / 0.3321\n",
"[97/100][287/391] Loss_D: 3.2351 Loss_G: 3.5849 D(x): 0.6241 D(G(z)): 0.4640 / 0.3394\n",
"[97/100][288/391] Loss_D: 2.7165 Loss_G: 2.6706 D(x): 0.6271 D(G(z)): 0.3788 / 0.4427\n",
"[97/100][289/391] Loss_D: 2.7475 Loss_G: 3.3928 D(x): 0.6642 D(G(z)): 0.3755 / 0.3709\n",
"[97/100][290/391] Loss_D: 2.2020 Loss_G: 1.4381 D(x): 0.7593 D(G(z)): 0.2876 / 0.6675\n",
"[97/100][291/391] Loss_D: 2.9043 Loss_G: 2.1321 D(x): 0.7090 D(G(z)): 0.4871 / 0.5420\n",
"[97/100][292/391] Loss_D: 2.5394 Loss_G: 2.1186 D(x): 0.7670 D(G(z)): 0.3871 / 0.5314\n",
"[97/100][293/391] Loss_D: 3.1151 Loss_G: 2.9316 D(x): 0.6451 D(G(z)): 0.4658 / 0.4079\n",
"[97/100][294/391] Loss_D: 2.6313 Loss_G: 2.9289 D(x): 0.7071 D(G(z)): 0.4580 / 0.4203\n",
"[97/100][295/391] Loss_D: 2.5281 Loss_G: 4.1988 D(x): 0.6963 D(G(z)): 0.3446 / 0.2812\n",
"[97/100][296/391] Loss_D: 2.9754 Loss_G: 3.4229 D(x): 0.6158 D(G(z)): 0.3636 / 0.3545\n",
"[97/100][297/391] Loss_D: 3.3141 Loss_G: 2.9815 D(x): 0.5780 D(G(z)): 0.4082 / 0.3926\n",
"[97/100][298/391] Loss_D: 2.3416 Loss_G: 3.2623 D(x): 0.8091 D(G(z)): 0.4918 / 0.3702\n",
"[97/100][299/391] Loss_D: 2.4157 Loss_G: 2.6713 D(x): 0.7152 D(G(z)): 0.2679 / 0.4320\n",
"[97/100][300/391] Loss_D: 3.0952 Loss_G: 2.7002 D(x): 0.7368 D(G(z)): 0.5152 / 0.4351\n",
"[97/100][301/391] Loss_D: 3.6439 Loss_G: 2.3560 D(x): 0.6624 D(G(z)): 0.3471 / 0.4878\n",
"[97/100][302/391] Loss_D: 2.1503 Loss_G: 3.0953 D(x): 0.7764 D(G(z)): 0.3610 / 0.3969\n",
"[97/100][303/391] Loss_D: 3.0221 Loss_G: 3.9288 D(x): 0.6509 D(G(z)): 0.4767 / 0.3025\n",
"[97/100][304/391] Loss_D: 2.7818 Loss_G: 2.6342 D(x): 0.6641 D(G(z)): 0.4608 / 0.4407\n",
"[97/100][305/391] Loss_D: 2.8824 Loss_G: 2.5595 D(x): 0.6567 D(G(z)): 0.4346 / 0.4596\n",
"[97/100][306/391] Loss_D: 3.5048 Loss_G: 2.9882 D(x): 0.5273 D(G(z)): 0.4124 / 0.4000\n",
"[97/100][307/391] Loss_D: 2.6871 Loss_G: 2.5764 D(x): 0.6900 D(G(z)): 0.4018 / 0.4498\n",
"[97/100][308/391] Loss_D: 2.6568 Loss_G: 3.1773 D(x): 0.7559 D(G(z)): 0.5402 / 0.3884\n",
"[97/100][309/391] Loss_D: 2.4744 Loss_G: 3.0083 D(x): 0.7052 D(G(z)): 0.4235 / 0.4128\n",
"[97/100][310/391] Loss_D: 2.6206 Loss_G: 3.1333 D(x): 0.6693 D(G(z)): 0.3021 / 0.3758\n",
"[97/100][311/391] Loss_D: 2.7725 Loss_G: 2.9063 D(x): 0.7124 D(G(z)): 0.4087 / 0.4083\n",
"[97/100][312/391] Loss_D: 2.8972 Loss_G: 2.5719 D(x): 0.6895 D(G(z)): 0.4732 / 0.4695\n",
"[97/100][313/391] Loss_D: 2.7954 Loss_G: 3.2906 D(x): 0.6224 D(G(z)): 0.3612 / 0.3634\n",
"[97/100][314/391] Loss_D: 2.5235 Loss_G: 3.0467 D(x): 0.6201 D(G(z)): 0.3303 / 0.3941\n",
"[97/100][315/391] Loss_D: 2.2920 Loss_G: 2.1388 D(x): 0.7310 D(G(z)): 0.3098 / 0.5178\n",
"[97/100][316/391] Loss_D: 2.8738 Loss_G: 2.4122 D(x): 0.7396 D(G(z)): 0.4315 / 0.4761\n",
"[97/100][317/391] Loss_D: 2.8736 Loss_G: 2.6283 D(x): 0.6771 D(G(z)): 0.4209 / 0.4452\n",
"[97/100][318/391] Loss_D: 2.4849 Loss_G: 3.0186 D(x): 0.6728 D(G(z)): 0.3928 / 0.3949\n",
"[97/100][319/391] Loss_D: 2.5206 Loss_G: 2.9556 D(x): 0.7382 D(G(z)): 0.4260 / 0.3980\n",
"[97/100][320/391] Loss_D: 3.8401 Loss_G: 3.4443 D(x): 0.6830 D(G(z)): 0.6309 / 0.3586\n",
"[97/100][321/391] Loss_D: 2.7520 Loss_G: 2.4752 D(x): 0.7190 D(G(z)): 0.4180 / 0.4806\n",
"[97/100][322/391] Loss_D: 2.4399 Loss_G: 3.5163 D(x): 0.7056 D(G(z)): 0.3353 / 0.3399\n",
"[97/100][323/391] Loss_D: 3.6074 Loss_G: 3.2487 D(x): 0.6207 D(G(z)): 0.5505 / 0.3811\n",
"[97/100][324/391] Loss_D: 3.0637 Loss_G: 2.6095 D(x): 0.5641 D(G(z)): 0.3623 / 0.4530\n",
"[97/100][325/391] Loss_D: 2.5354 Loss_G: 3.2469 D(x): 0.7600 D(G(z)): 0.3916 / 0.3607\n",
"[97/100][326/391] Loss_D: 2.3608 Loss_G: 2.5346 D(x): 0.6629 D(G(z)): 0.2690 / 0.4576\n",
"[97/100][327/391] Loss_D: 2.9516 Loss_G: 3.9979 D(x): 0.6079 D(G(z)): 0.4127 / 0.2995\n",
"[97/100][328/391] Loss_D: 2.8612 Loss_G: 2.7325 D(x): 0.6924 D(G(z)): 0.4908 / 0.4431\n",
"[97/100][329/391] Loss_D: 3.3714 Loss_G: 2.4460 D(x): 0.6436 D(G(z)): 0.5217 / 0.4680\n",
"[97/100][330/391] Loss_D: 2.7225 Loss_G: 3.1962 D(x): 0.6655 D(G(z)): 0.3690 / 0.3859\n",
"[97/100][331/391] Loss_D: 3.4824 Loss_G: 2.1036 D(x): 0.5766 D(G(z)): 0.4616 / 0.5172\n",
"[97/100][332/391] Loss_D: 3.3756 Loss_G: 2.8149 D(x): 0.6745 D(G(z)): 0.5744 / 0.4324\n",
"[97/100][333/391] Loss_D: 2.6181 Loss_G: 2.9918 D(x): 0.7148 D(G(z)): 0.3524 / 0.4010\n",
"[97/100][334/391] Loss_D: 2.5938 Loss_G: 2.6305 D(x): 0.7120 D(G(z)): 0.4571 / 0.4538\n",
"[97/100][335/391] Loss_D: 2.9535 Loss_G: 3.6845 D(x): 0.7262 D(G(z)): 0.4932 / 0.3286\n",
"[97/100][336/391] Loss_D: 3.2631 Loss_G: 3.4849 D(x): 0.5735 D(G(z)): 0.3973 / 0.3510\n",
"[97/100][337/391] Loss_D: 3.1916 Loss_G: 3.4281 D(x): 0.5991 D(G(z)): 0.4373 / 0.3636\n",
"[97/100][338/391] Loss_D: 3.1546 Loss_G: 2.6822 D(x): 0.6732 D(G(z)): 0.5057 / 0.4522\n",
"[97/100][339/391] Loss_D: 2.9560 Loss_G: 2.9133 D(x): 0.6501 D(G(z)): 0.4248 / 0.4280\n",
"[97/100][340/391] Loss_D: 2.6224 Loss_G: 2.5829 D(x): 0.6799 D(G(z)): 0.3487 / 0.4712\n",
"[97/100][341/391] Loss_D: 2.5450 Loss_G: 2.5275 D(x): 0.7033 D(G(z)): 0.4043 / 0.4632\n",
"[97/100][342/391] Loss_D: 2.7469 Loss_G: 3.1948 D(x): 0.6661 D(G(z)): 0.4237 / 0.3956\n",
"[97/100][343/391] Loss_D: 2.8506 Loss_G: 2.6567 D(x): 0.6769 D(G(z)): 0.3989 / 0.4414\n",
"[97/100][344/391] Loss_D: 3.1428 Loss_G: 3.0085 D(x): 0.6404 D(G(z)): 0.4874 / 0.4024\n",
"[97/100][345/391] Loss_D: 2.4578 Loss_G: 2.3031 D(x): 0.7147 D(G(z)): 0.3415 / 0.4887\n",
"[97/100][346/391] Loss_D: 3.6806 Loss_G: 4.1139 D(x): 0.5452 D(G(z)): 0.4792 / 0.2797\n",
"[97/100][347/391] Loss_D: 2.9535 Loss_G: 3.3652 D(x): 0.6055 D(G(z)): 0.3427 / 0.3546\n",
"[97/100][348/391] Loss_D: 2.6956 Loss_G: 2.1741 D(x): 0.7091 D(G(z)): 0.4744 / 0.5154\n",
"[97/100][349/391] Loss_D: 3.1146 Loss_G: 2.7430 D(x): 0.6907 D(G(z)): 0.5091 / 0.4287\n",
"[97/100][350/391] Loss_D: 3.5176 Loss_G: 3.1289 D(x): 0.6995 D(G(z)): 0.6057 / 0.3899\n",
"[97/100][351/391] Loss_D: 2.7902 Loss_G: 3.1852 D(x): 0.6706 D(G(z)): 0.4070 / 0.3720\n",
"[97/100][352/391] Loss_D: 2.7503 Loss_G: 3.1069 D(x): 0.5648 D(G(z)): 0.2950 / 0.3797\n",
"[97/100][353/391] Loss_D: 2.7838 Loss_G: 2.6575 D(x): 0.6161 D(G(z)): 0.3498 / 0.4354\n",
"[97/100][354/391] Loss_D: 2.2971 Loss_G: 2.9009 D(x): 0.7720 D(G(z)): 0.4354 / 0.4121\n",
"[97/100][355/391] Loss_D: 2.8922 Loss_G: 2.0594 D(x): 0.7683 D(G(z)): 0.5359 / 0.5279\n",
"[97/100][356/391] Loss_D: 2.9628 Loss_G: 3.3265 D(x): 0.5642 D(G(z)): 0.3684 / 0.3560\n",
"[97/100][357/391] Loss_D: 3.2371 Loss_G: 2.7453 D(x): 0.5777 D(G(z)): 0.4351 / 0.4338\n",
"[97/100][358/391] Loss_D: 2.4053 Loss_G: 2.8604 D(x): 0.7352 D(G(z)): 0.4380 / 0.4008\n",
"[97/100][359/391] Loss_D: 2.4285 Loss_G: 3.5410 D(x): 0.6866 D(G(z)): 0.3049 / 0.3475\n",
"[97/100][360/391] Loss_D: 2.6487 Loss_G: 3.3377 D(x): 0.7511 D(G(z)): 0.4441 / 0.3668\n",
"[97/100][361/391] Loss_D: 3.6010 Loss_G: 3.2257 D(x): 0.7109 D(G(z)): 0.4662 / 0.3816\n",
"[97/100][362/391] Loss_D: 3.1189 Loss_G: 2.6604 D(x): 0.6394 D(G(z)): 0.4649 / 0.4505\n",
"[97/100][363/391] Loss_D: 3.0490 Loss_G: 2.9662 D(x): 0.6660 D(G(z)): 0.5076 / 0.4108\n",
"[97/100][364/391] Loss_D: 2.5761 Loss_G: 2.7210 D(x): 0.6960 D(G(z)): 0.3403 / 0.4331\n",
"[97/100][365/391] Loss_D: 2.7034 Loss_G: 3.1245 D(x): 0.6650 D(G(z)): 0.4018 / 0.3879\n",
"[97/100][366/391] Loss_D: 3.0354 Loss_G: 3.6466 D(x): 0.6498 D(G(z)): 0.4841 / 0.3309\n",
"[97/100][367/391] Loss_D: 2.8304 Loss_G: 3.5693 D(x): 0.6341 D(G(z)): 0.3243 / 0.3348\n",
"[97/100][368/391] Loss_D: 2.7214 Loss_G: 3.7857 D(x): 0.6189 D(G(z)): 0.2886 / 0.3059\n",
"[97/100][369/391] Loss_D: 2.3504 Loss_G: 2.4366 D(x): 0.7065 D(G(z)): 0.3423 / 0.4811\n",
"[97/100][370/391] Loss_D: 2.7027 Loss_G: 2.4426 D(x): 0.7292 D(G(z)): 0.4656 / 0.4858\n",
"[97/100][371/391] Loss_D: 2.6871 Loss_G: 2.8417 D(x): 0.6926 D(G(z)): 0.4073 / 0.4290\n",
"[97/100][372/391] Loss_D: 3.0357 Loss_G: 3.5057 D(x): 0.6248 D(G(z)): 0.4644 / 0.3346\n",
"[97/100][373/391] Loss_D: 2.3462 Loss_G: 2.8579 D(x): 0.7877 D(G(z)): 0.3565 / 0.4216\n",
"[97/100][374/391] Loss_D: 3.0240 Loss_G: 3.1606 D(x): 0.7239 D(G(z)): 0.5184 / 0.3719\n",
"[97/100][375/391] Loss_D: 2.9673 Loss_G: 3.0355 D(x): 0.6416 D(G(z)): 0.4176 / 0.3930\n",
"[97/100][376/391] Loss_D: 3.2812 Loss_G: 3.0020 D(x): 0.5617 D(G(z)): 0.3751 / 0.4007\n",
"[97/100][377/391] Loss_D: 2.7186 Loss_G: 2.5917 D(x): 0.7208 D(G(z)): 0.4328 / 0.4617\n",
"[97/100][378/391] Loss_D: 3.0400 Loss_G: 2.7927 D(x): 0.6122 D(G(z)): 0.4819 / 0.4219\n",
"[97/100][379/391] Loss_D: 3.2135 Loss_G: 3.2237 D(x): 0.5897 D(G(z)): 0.4588 / 0.3780\n",
"[97/100][380/391] Loss_D: 3.6631 Loss_G: 2.1750 D(x): 0.5966 D(G(z)): 0.5690 / 0.5151\n",
"[97/100][381/391] Loss_D: 2.8004 Loss_G: 2.2665 D(x): 0.6661 D(G(z)): 0.4149 / 0.4903\n",
"[97/100][382/391] Loss_D: 3.3465 Loss_G: 2.7862 D(x): 0.6922 D(G(z)): 0.5741 / 0.4306\n",
"[97/100][383/391] Loss_D: 2.5785 Loss_G: 2.9088 D(x): 0.6573 D(G(z)): 0.3077 / 0.4228\n",
"[97/100][384/391] Loss_D: 2.6503 Loss_G: 3.0226 D(x): 0.6879 D(G(z)): 0.4724 / 0.4041\n",
"[97/100][385/391] Loss_D: 2.6568 Loss_G: 2.8848 D(x): 0.6966 D(G(z)): 0.3988 / 0.4264\n",
"[97/100][386/391] Loss_D: 3.0334 Loss_G: 2.9763 D(x): 0.6080 D(G(z)): 0.4154 / 0.3889\n",
"[97/100][387/391] Loss_D: 3.1101 Loss_G: 3.2696 D(x): 0.5518 D(G(z)): 0.3171 / 0.3661\n",
"[97/100][388/391] Loss_D: 2.9322 Loss_G: 2.6592 D(x): 0.6813 D(G(z)): 0.4844 / 0.4299\n",
"[97/100][389/391] Loss_D: 2.8522 Loss_G: 2.5044 D(x): 0.6898 D(G(z)): 0.4411 / 0.4549\n",
"[97/100][390/391] Loss_D: 3.5709 Loss_G: 2.8884 D(x): 0.5130 D(G(z)): 0.4763 / 0.4307\n",
"[97/100][391/391] Loss_D: 3.8946 Loss_G: 2.4195 D(x): 0.7825 D(G(z)): 0.4281 / 0.4887\n",
"[98/100][1/391] Loss_D: 3.5147 Loss_G: 3.2527 D(x): 0.6775 D(G(z)): 0.4278 / 0.3811\n",
"[98/100][2/391] Loss_D: 2.5060 Loss_G: 3.7494 D(x): 0.7666 D(G(z)): 0.4447 / 0.3297\n",
"[98/100][3/391] Loss_D: 2.5186 Loss_G: 3.1873 D(x): 0.7702 D(G(z)): 0.4365 / 0.3820\n",
"[98/100][4/391] Loss_D: 2.7498 Loss_G: 3.1887 D(x): 0.6698 D(G(z)): 0.4446 / 0.3600\n",
"[98/100][5/391] Loss_D: 2.7059 Loss_G: 3.3531 D(x): 0.6552 D(G(z)): 0.3688 / 0.3411\n",
"[98/100][6/391] Loss_D: 2.7330 Loss_G: 3.4523 D(x): 0.6934 D(G(z)): 0.3892 / 0.3392\n",
"[98/100][7/391] Loss_D: 3.0550 Loss_G: 2.8520 D(x): 0.6003 D(G(z)): 0.3418 / 0.4045\n",
"[98/100][8/391] Loss_D: 2.4951 Loss_G: 2.3983 D(x): 0.6731 D(G(z)): 0.4204 / 0.4760\n",
"[98/100][9/391] Loss_D: 2.3662 Loss_G: 2.9760 D(x): 0.7307 D(G(z)): 0.3864 / 0.4213\n",
"[98/100][10/391] Loss_D: 2.5012 Loss_G: 3.7412 D(x): 0.7201 D(G(z)): 0.4075 / 0.3281\n",
"[98/100][11/391] Loss_D: 2.4925 Loss_G: 2.5951 D(x): 0.7583 D(G(z)): 0.3600 / 0.4638\n",
"[98/100][12/391] Loss_D: 2.7217 Loss_G: 2.4895 D(x): 0.6786 D(G(z)): 0.4041 / 0.4697\n",
"[98/100][13/391] Loss_D: 2.2748 Loss_G: 3.3804 D(x): 0.7071 D(G(z)): 0.2396 / 0.3564\n",
"[98/100][14/391] Loss_D: 2.9531 Loss_G: 2.2923 D(x): 0.6256 D(G(z)): 0.4843 / 0.4847\n",
"[98/100][15/391] Loss_D: 2.2052 Loss_G: 2.8416 D(x): 0.7661 D(G(z)): 0.3451 / 0.4266\n",
"[98/100][16/391] Loss_D: 3.1177 Loss_G: 4.0972 D(x): 0.7478 D(G(z)): 0.4834 / 0.3011\n",
"[98/100][17/391] Loss_D: 3.1653 Loss_G: 4.0584 D(x): 0.5784 D(G(z)): 0.4396 / 0.2826\n",
"[98/100][18/391] Loss_D: 2.9446 Loss_G: 2.7789 D(x): 0.6164 D(G(z)): 0.4302 / 0.4469\n",
"[98/100][19/391] Loss_D: 2.3016 Loss_G: 2.5678 D(x): 0.7472 D(G(z)): 0.3584 / 0.4524\n",
"[98/100][20/391] Loss_D: 3.1815 Loss_G: 2.6648 D(x): 0.6835 D(G(z)): 0.5271 / 0.4495\n",
"[98/100][21/391] Loss_D: 3.4546 Loss_G: 2.4524 D(x): 0.5441 D(G(z)): 0.4794 / 0.4639\n",
"[98/100][22/391] Loss_D: 2.8785 Loss_G: 2.8203 D(x): 0.6275 D(G(z)): 0.3839 / 0.4374\n",
"[98/100][23/391] Loss_D: 3.2490 Loss_G: 2.4915 D(x): 0.5569 D(G(z)): 0.4238 / 0.4733\n",
"[98/100][24/391] Loss_D: 2.7671 Loss_G: 3.0299 D(x): 0.7335 D(G(z)): 0.4292 / 0.3921\n",
"[98/100][25/391] Loss_D: 2.2689 Loss_G: 2.2309 D(x): 0.7528 D(G(z)): 0.3105 / 0.5144\n",
"[98/100][26/391] Loss_D: 2.6430 Loss_G: 3.3925 D(x): 0.6791 D(G(z)): 0.3870 / 0.3610\n",
"[98/100][27/391] Loss_D: 2.8886 Loss_G: 4.2034 D(x): 0.7736 D(G(z)): 0.4603 / 0.2734\n",
"[98/100][28/391] Loss_D: 3.3703 Loss_G: 3.0394 D(x): 0.5218 D(G(z)): 0.3839 / 0.3990\n",
"[98/100][29/391] Loss_D: 2.8471 Loss_G: 2.8175 D(x): 0.6301 D(G(z)): 0.3791 / 0.4271\n",
"[98/100][30/391] Loss_D: 3.0768 Loss_G: 2.2327 D(x): 0.6020 D(G(z)): 0.4689 / 0.5234\n",
"[98/100][31/391] Loss_D: 3.6227 Loss_G: 2.6179 D(x): 0.6725 D(G(z)): 0.4362 / 0.4566\n",
"[98/100][32/391] Loss_D: 3.3229 Loss_G: 2.4031 D(x): 0.6573 D(G(z)): 0.5242 / 0.4772\n",
"[98/100][33/391] Loss_D: 2.5614 Loss_G: 2.5568 D(x): 0.7729 D(G(z)): 0.4317 / 0.4402\n",
"[98/100][34/391] Loss_D: 2.6559 Loss_G: 2.6566 D(x): 0.7187 D(G(z)): 0.4682 / 0.4328\n",
"[98/100][35/391] Loss_D: 2.5993 Loss_G: 3.4421 D(x): 0.7679 D(G(z)): 0.4651 / 0.3542\n",
"[98/100][36/391] Loss_D: 3.0954 Loss_G: 3.5722 D(x): 0.6375 D(G(z)): 0.4389 / 0.3328\n",
"[98/100][37/391] Loss_D: 2.7707 Loss_G: 3.4580 D(x): 0.6096 D(G(z)): 0.3667 / 0.3582\n",
"[98/100][38/391] Loss_D: 2.3346 Loss_G: 3.3519 D(x): 0.6897 D(G(z)): 0.3314 / 0.3483\n",
"[98/100][39/391] Loss_D: 2.6874 Loss_G: 3.9926 D(x): 0.7567 D(G(z)): 0.4456 / 0.2957\n",
"[98/100][40/391] Loss_D: 2.9888 Loss_G: 3.1648 D(x): 0.5991 D(G(z)): 0.3663 / 0.3691\n",
"[98/100][41/391] Loss_D: 3.5416 Loss_G: 3.0312 D(x): 0.6052 D(G(z)): 0.5096 / 0.4027\n",
"[98/100][42/391] Loss_D: 2.6597 Loss_G: 2.2632 D(x): 0.7073 D(G(z)): 0.3874 / 0.5016\n",
"[98/100][43/391] Loss_D: 2.4896 Loss_G: 2.6871 D(x): 0.7613 D(G(z)): 0.3698 / 0.4500\n",
"[98/100][44/391] Loss_D: 2.3788 Loss_G: 2.8992 D(x): 0.6829 D(G(z)): 0.3569 / 0.4190\n",
"[98/100][45/391] Loss_D: 2.9062 Loss_G: 2.5508 D(x): 0.6311 D(G(z)): 0.3974 / 0.4683\n",
"[98/100][46/391] Loss_D: 2.5163 Loss_G: 3.2530 D(x): 0.7360 D(G(z)): 0.3626 / 0.3786\n",
"[98/100][47/391] Loss_D: 2.8539 Loss_G: 2.9616 D(x): 0.6597 D(G(z)): 0.4514 / 0.3981\n",
"[98/100][48/391] Loss_D: 3.1285 Loss_G: 3.3581 D(x): 0.5779 D(G(z)): 0.3982 / 0.3581\n",
"[98/100][49/391] Loss_D: 2.6079 Loss_G: 3.2699 D(x): 0.6996 D(G(z)): 0.4094 / 0.3840\n",
"[98/100][50/391] Loss_D: 2.5294 Loss_G: 2.4353 D(x): 0.7637 D(G(z)): 0.4376 / 0.4947\n",
"[98/100][51/391] Loss_D: 2.8785 Loss_G: 2.7503 D(x): 0.7311 D(G(z)): 0.4967 / 0.4333\n",
"[98/100][52/391] Loss_D: 2.3068 Loss_G: 3.0816 D(x): 0.6719 D(G(z)): 0.2574 / 0.3871\n",
"[98/100][53/391] Loss_D: 2.3383 Loss_G: 2.4944 D(x): 0.7304 D(G(z)): 0.3567 / 0.4653\n",
"[98/100][54/391] Loss_D: 2.4669 Loss_G: 2.9328 D(x): 0.6900 D(G(z)): 0.3584 / 0.4207\n",
"[98/100][55/391] Loss_D: 2.5600 Loss_G: 2.9591 D(x): 0.7391 D(G(z)): 0.4252 / 0.3973\n",
"[98/100][56/391] Loss_D: 2.6878 Loss_G: 2.2928 D(x): 0.6921 D(G(z)): 0.3885 / 0.5073\n",
"[98/100][57/391] Loss_D: 3.4649 Loss_G: 3.5385 D(x): 0.6072 D(G(z)): 0.5260 / 0.3391\n",
"[98/100][58/391] Loss_D: 2.4682 Loss_G: 2.7142 D(x): 0.6999 D(G(z)): 0.3924 / 0.4358\n",
"[98/100][59/391] Loss_D: 2.5948 Loss_G: 3.0798 D(x): 0.6772 D(G(z)): 0.3648 / 0.4018\n",
"[98/100][60/391] Loss_D: 2.6082 Loss_G: 3.1810 D(x): 0.7790 D(G(z)): 0.4302 / 0.3877\n",
"[98/100][61/391] Loss_D: 3.8270 Loss_G: 3.9196 D(x): 0.7379 D(G(z)): 0.4630 / 0.3135\n",
"[98/100][62/391] Loss_D: 2.9932 Loss_G: 4.7130 D(x): 0.5868 D(G(z)): 0.3810 / 0.2399\n",
"[98/100][63/391] Loss_D: 3.0750 Loss_G: 3.6937 D(x): 0.6638 D(G(z)): 0.4940 / 0.3236\n",
"[98/100][64/391] Loss_D: 2.7066 Loss_G: 3.4609 D(x): 0.6808 D(G(z)): 0.4378 / 0.3469\n",
"[98/100][65/391] Loss_D: 2.4878 Loss_G: 2.9279 D(x): 0.7425 D(G(z)): 0.3834 / 0.3973\n",
"[98/100][66/391] Loss_D: 2.8396 Loss_G: 2.4655 D(x): 0.5939 D(G(z)): 0.3694 / 0.4843\n",
"[98/100][67/391] Loss_D: 3.2339 Loss_G: 2.8971 D(x): 0.5698 D(G(z)): 0.3295 / 0.4095\n",
"[98/100][68/391] Loss_D: 2.5832 Loss_G: 3.1220 D(x): 0.6502 D(G(z)): 0.3562 / 0.3902\n",
"[98/100][69/391] Loss_D: 2.6091 Loss_G: 3.4441 D(x): 0.7421 D(G(z)): 0.4560 / 0.3546\n",
"[98/100][70/391] Loss_D: 2.6095 Loss_G: 2.8828 D(x): 0.7730 D(G(z)): 0.4234 / 0.4107\n",
"[98/100][71/391] Loss_D: 3.1884 Loss_G: 3.3986 D(x): 0.7537 D(G(z)): 0.5485 / 0.3762\n",
"[98/100][72/391] Loss_D: 2.4728 Loss_G: 4.3165 D(x): 0.6568 D(G(z)): 0.3382 / 0.2671\n",
"[98/100][73/391] Loss_D: 2.7323 Loss_G: 3.1536 D(x): 0.7088 D(G(z)): 0.4469 / 0.3753\n",
"[98/100][74/391] Loss_D: 3.6684 Loss_G: 3.7951 D(x): 0.5925 D(G(z)): 0.5393 / 0.3248\n",
"[98/100][75/391] Loss_D: 2.6005 Loss_G: 2.5868 D(x): 0.6421 D(G(z)): 0.3348 / 0.4592\n",
"[98/100][76/391] Loss_D: 2.2976 Loss_G: 3.7493 D(x): 0.7312 D(G(z)): 0.3087 / 0.3121\n",
"[98/100][77/391] Loss_D: 2.8742 Loss_G: 2.4258 D(x): 0.6184 D(G(z)): 0.3067 / 0.4774\n",
"[98/100][78/391] Loss_D: 3.0528 Loss_G: 2.7486 D(x): 0.6026 D(G(z)): 0.4490 / 0.4337\n",
"[98/100][79/391] Loss_D: 2.1722 Loss_G: 3.3942 D(x): 0.7429 D(G(z)): 0.3041 / 0.3555\n",
"[98/100][80/391] Loss_D: 2.6741 Loss_G: 2.8714 D(x): 0.7525 D(G(z)): 0.4330 / 0.4271\n",
"[98/100][81/391] Loss_D: 2.8102 Loss_G: 2.6984 D(x): 0.8172 D(G(z)): 0.5296 / 0.4417\n",
"[98/100][82/391] Loss_D: 2.8305 Loss_G: 2.7067 D(x): 0.7077 D(G(z)): 0.4343 / 0.4427\n",
"[98/100][83/391] Loss_D: 2.6528 Loss_G: 2.8252 D(x): 0.7182 D(G(z)): 0.4178 / 0.4377\n",
"[98/100][84/391] Loss_D: 2.5467 Loss_G: 4.7145 D(x): 0.6264 D(G(z)): 0.3333 / 0.2431\n",
"[98/100][85/391] Loss_D: 2.5462 Loss_G: 3.5071 D(x): 0.6985 D(G(z)): 0.3463 / 0.3450\n",
"[98/100][86/391] Loss_D: 2.7943 Loss_G: 3.4829 D(x): 0.7802 D(G(z)): 0.4632 / 0.3360\n",
"[98/100][87/391] Loss_D: 3.0156 Loss_G: 2.8752 D(x): 0.6612 D(G(z)): 0.3834 / 0.4007\n",
"[98/100][88/391] Loss_D: 2.3018 Loss_G: 2.6328 D(x): 0.7334 D(G(z)): 0.4059 / 0.4409\n",
"[98/100][89/391] Loss_D: 2.7015 Loss_G: 2.7698 D(x): 0.7447 D(G(z)): 0.4769 / 0.4337\n",
"[98/100][90/391] Loss_D: 2.3603 Loss_G: 3.1149 D(x): 0.7606 D(G(z)): 0.3709 / 0.3847\n",
"[98/100][91/391] Loss_D: 3.5116 Loss_G: 1.8809 D(x): 0.6748 D(G(z)): 0.3416 / 0.5632\n",
"[98/100][92/391] Loss_D: 2.4523 Loss_G: 3.6314 D(x): 0.7211 D(G(z)): 0.3895 / 0.3358\n",
"[98/100][93/391] Loss_D: 3.4585 Loss_G: 3.5708 D(x): 0.6489 D(G(z)): 0.5376 / 0.3437\n",
"[98/100][94/391] Loss_D: 2.6721 Loss_G: 3.3254 D(x): 0.6327 D(G(z)): 0.3522 / 0.3771\n",
"[98/100][95/391] Loss_D: 2.7633 Loss_G: 3.2113 D(x): 0.6208 D(G(z)): 0.3399 / 0.3773\n",
"[98/100][96/391] Loss_D: 2.8368 Loss_G: 3.2749 D(x): 0.6870 D(G(z)): 0.4329 / 0.3605\n",
"[98/100][97/391] Loss_D: 2.9789 Loss_G: 3.0894 D(x): 0.6657 D(G(z)): 0.4090 / 0.3890\n",
"[98/100][98/391] Loss_D: 2.4655 Loss_G: 3.6550 D(x): 0.6857 D(G(z)): 0.2808 / 0.3273\n",
"[98/100][99/391] Loss_D: 2.9687 Loss_G: 2.7163 D(x): 0.6652 D(G(z)): 0.4469 / 0.4435\n",
"[98/100][100/391] Loss_D: 2.4820 Loss_G: 2.1244 D(x): 0.7709 D(G(z)): 0.4635 / 0.5502\n",
"[98/100][101/391] Loss_D: 2.7747 Loss_G: 3.5201 D(x): 0.7036 D(G(z)): 0.4611 / 0.3472\n",
"[98/100][102/391] Loss_D: 2.5075 Loss_G: 2.7981 D(x): 0.7624 D(G(z)): 0.3888 / 0.4218\n",
"[98/100][103/391] Loss_D: 2.9185 Loss_G: 3.3697 D(x): 0.6206 D(G(z)): 0.4161 / 0.3607\n",
"[98/100][104/391] Loss_D: 2.7286 Loss_G: 3.2069 D(x): 0.6237 D(G(z)): 0.3581 / 0.3833\n",
"[98/100][105/391] Loss_D: 2.8190 Loss_G: 2.0715 D(x): 0.7022 D(G(z)): 0.4248 / 0.5281\n",
"[98/100][106/391] Loss_D: 3.2999 Loss_G: 2.6587 D(x): 0.6698 D(G(z)): 0.5297 / 0.4592\n",
"[98/100][107/391] Loss_D: 2.8647 Loss_G: 2.1952 D(x): 0.5968 D(G(z)): 0.2760 / 0.4946\n",
"[98/100][108/391] Loss_D: 2.3092 Loss_G: 2.4305 D(x): 0.7321 D(G(z)): 0.3112 / 0.4704\n",
"[98/100][109/391] Loss_D: 2.6393 Loss_G: 2.8810 D(x): 0.6606 D(G(z)): 0.3812 / 0.4212\n",
"[98/100][110/391] Loss_D: 2.9824 Loss_G: 2.2300 D(x): 0.7395 D(G(z)): 0.5287 / 0.5233\n",
"[98/100][111/391] Loss_D: 2.6576 Loss_G: 2.8569 D(x): 0.6980 D(G(z)): 0.4066 / 0.4178\n",
"[98/100][112/391] Loss_D: 3.2050 Loss_G: 2.7336 D(x): 0.6197 D(G(z)): 0.4820 / 0.4226\n",
"[98/100][113/391] Loss_D: 3.2179 Loss_G: 2.9585 D(x): 0.6350 D(G(z)): 0.4852 / 0.4122\n",
"[98/100][114/391] Loss_D: 2.7592 Loss_G: 2.9105 D(x): 0.6401 D(G(z)): 0.3935 / 0.4067\n",
"[98/100][115/391] Loss_D: 2.7751 Loss_G: 3.1864 D(x): 0.6393 D(G(z)): 0.3421 / 0.3744\n",
"[98/100][116/391] Loss_D: 3.3135 Loss_G: 2.3084 D(x): 0.6166 D(G(z)): 0.4925 / 0.4900\n",
"[98/100][117/391] Loss_D: 3.0374 Loss_G: 3.4964 D(x): 0.7090 D(G(z)): 0.4703 / 0.3416\n",
"[98/100][118/391] Loss_D: 2.7084 Loss_G: 2.5708 D(x): 0.6609 D(G(z)): 0.4177 / 0.4644\n",
"[98/100][119/391] Loss_D: 2.3485 Loss_G: 3.1221 D(x): 0.7834 D(G(z)): 0.4427 / 0.4006\n",
"[98/100][120/391] Loss_D: 2.5071 Loss_G: 2.8961 D(x): 0.6956 D(G(z)): 0.3525 / 0.4241\n",
"[98/100][121/391] Loss_D: 3.5463 Loss_G: 3.2292 D(x): 0.6935 D(G(z)): 0.4202 / 0.3804\n",
"[98/100][122/391] Loss_D: 2.4715 Loss_G: 3.5743 D(x): 0.7232 D(G(z)): 0.3672 / 0.3379\n",
"[98/100][123/391] Loss_D: 2.5676 Loss_G: 2.6316 D(x): 0.7440 D(G(z)): 0.4201 / 0.4616\n",
"[98/100][124/391] Loss_D: 2.6655 Loss_G: 4.1014 D(x): 0.6485 D(G(z)): 0.3843 / 0.2927\n",
"[98/100][125/391] Loss_D: 3.7142 Loss_G: 2.6437 D(x): 0.5136 D(G(z)): 0.4031 / 0.4382\n",
"[98/100][126/391] Loss_D: 2.7374 Loss_G: 3.3347 D(x): 0.6621 D(G(z)): 0.4363 / 0.3627\n",
"[98/100][127/391] Loss_D: 2.9219 Loss_G: 2.1018 D(x): 0.6194 D(G(z)): 0.3238 / 0.5374\n",
"[98/100][128/391] Loss_D: 3.1033 Loss_G: 1.9777 D(x): 0.6650 D(G(z)): 0.5555 / 0.5400\n",
"[98/100][129/391] Loss_D: 3.5361 Loss_G: 2.4255 D(x): 0.5541 D(G(z)): 0.5094 / 0.4944\n",
"[98/100][130/391] Loss_D: 2.8638 Loss_G: 2.6780 D(x): 0.7061 D(G(z)): 0.4604 / 0.4456\n",
"[98/100][131/391] Loss_D: 2.6015 Loss_G: 3.1817 D(x): 0.7708 D(G(z)): 0.4262 / 0.3850\n",
"[98/100][132/391] Loss_D: 2.3804 Loss_G: 3.2789 D(x): 0.7255 D(G(z)): 0.3388 / 0.3644\n",
"[98/100][133/391] Loss_D: 2.5405 Loss_G: 2.8276 D(x): 0.6224 D(G(z)): 0.3245 / 0.4252\n",
"[98/100][134/391] Loss_D: 2.3702 Loss_G: 2.1702 D(x): 0.7116 D(G(z)): 0.3685 / 0.5133\n",
"[98/100][135/391] Loss_D: 2.8584 Loss_G: 2.8242 D(x): 0.6912 D(G(z)): 0.4551 / 0.4188\n",
"[98/100][136/391] Loss_D: 3.1027 Loss_G: 1.8360 D(x): 0.7072 D(G(z)): 0.4939 / 0.5902\n",
"[98/100][137/391] Loss_D: 2.6560 Loss_G: 3.4023 D(x): 0.6418 D(G(z)): 0.2685 / 0.3377\n",
"[98/100][138/391] Loss_D: 2.3447 Loss_G: 3.8109 D(x): 0.7583 D(G(z)): 0.4455 / 0.3255\n",
"[98/100][139/391] Loss_D: 3.0764 Loss_G: 3.1036 D(x): 0.6360 D(G(z)): 0.4525 / 0.3963\n",
"[98/100][140/391] Loss_D: 2.1004 Loss_G: 2.6041 D(x): 0.7476 D(G(z)): 0.3097 / 0.4598\n",
"[98/100][141/391] Loss_D: 2.8330 Loss_G: 3.4390 D(x): 0.6455 D(G(z)): 0.4179 / 0.3574\n",
"[98/100][142/391] Loss_D: 2.5728 Loss_G: 3.5690 D(x): 0.7390 D(G(z)): 0.4548 / 0.3377\n",
"[98/100][143/391] Loss_D: 2.4750 Loss_G: 2.5016 D(x): 0.7135 D(G(z)): 0.3196 / 0.4766\n",
"[98/100][144/391] Loss_D: 2.3435 Loss_G: 2.3792 D(x): 0.7825 D(G(z)): 0.4607 / 0.5023\n",
"[98/100][145/391] Loss_D: 3.0183 Loss_G: 2.9242 D(x): 0.6114 D(G(z)): 0.3665 / 0.4059\n",
"[98/100][146/391] Loss_D: 2.3146 Loss_G: 2.9257 D(x): 0.7466 D(G(z)): 0.2908 / 0.4111\n",
"[98/100][147/391] Loss_D: 3.1707 Loss_G: 3.3247 D(x): 0.5849 D(G(z)): 0.3926 / 0.3644\n",
"[98/100][148/391] Loss_D: 2.3410 Loss_G: 2.0340 D(x): 0.7124 D(G(z)): 0.3619 / 0.5483\n",
"[98/100][149/391] Loss_D: 3.0565 Loss_G: 3.3334 D(x): 0.6877 D(G(z)): 0.4953 / 0.3687\n",
"[98/100][150/391] Loss_D: 3.0111 Loss_G: 2.4618 D(x): 0.7181 D(G(z)): 0.5321 / 0.4831\n",
"[98/100][151/391] Loss_D: 3.7853 Loss_G: 3.0861 D(x): 0.7005 D(G(z)): 0.5019 / 0.3933\n",
"[98/100][152/391] Loss_D: 2.5527 Loss_G: 2.2531 D(x): 0.7023 D(G(z)): 0.4190 / 0.5080\n",
"[98/100][153/391] Loss_D: 2.6032 Loss_G: 3.4568 D(x): 0.6720 D(G(z)): 0.3310 / 0.3605\n",
"[98/100][154/391] Loss_D: 3.7117 Loss_G: 2.3319 D(x): 0.6010 D(G(z)): 0.5735 / 0.4941\n",
"[98/100][155/391] Loss_D: 3.3602 Loss_G: 3.2050 D(x): 0.7151 D(G(z)): 0.5294 / 0.3879\n",
"[98/100][156/391] Loss_D: 3.3815 Loss_G: 3.2072 D(x): 0.5989 D(G(z)): 0.4892 / 0.3925\n",
"[98/100][157/391] Loss_D: 3.4909 Loss_G: 2.6731 D(x): 0.5922 D(G(z)): 0.5143 / 0.4343\n",
"[98/100][158/391] Loss_D: 2.9895 Loss_G: 3.8544 D(x): 0.5757 D(G(z)): 0.3412 / 0.3137\n",
"[98/100][159/391] Loss_D: 2.9524 Loss_G: 3.6605 D(x): 0.6413 D(G(z)): 0.4118 / 0.3441\n",
"[98/100][160/391] Loss_D: 2.5979 Loss_G: 3.2955 D(x): 0.7094 D(G(z)): 0.4000 / 0.3752\n",
"[98/100][161/391] Loss_D: 2.7587 Loss_G: 3.3819 D(x): 0.7119 D(G(z)): 0.4381 / 0.3712\n",
"[98/100][162/391] Loss_D: 2.7997 Loss_G: 2.9991 D(x): 0.6073 D(G(z)): 0.3766 / 0.3990\n",
"[98/100][163/391] Loss_D: 2.4210 Loss_G: 2.5534 D(x): 0.7312 D(G(z)): 0.3971 / 0.4607\n",
"[98/100][164/391] Loss_D: 2.7600 Loss_G: 2.8663 D(x): 0.6415 D(G(z)): 0.3785 / 0.4032\n",
"[98/100][165/391] Loss_D: 3.0456 Loss_G: 2.7046 D(x): 0.5919 D(G(z)): 0.4096 / 0.4246\n",
"[98/100][166/391] Loss_D: 2.5019 Loss_G: 2.5211 D(x): 0.6933 D(G(z)): 0.3673 / 0.4679\n",
"[98/100][167/391] Loss_D: 3.0420 Loss_G: 2.4313 D(x): 0.6703 D(G(z)): 0.4563 / 0.4799\n",
"[98/100][168/391] Loss_D: 2.8583 Loss_G: 2.2042 D(x): 0.7020 D(G(z)): 0.4687 / 0.5272\n",
"[98/100][169/391] Loss_D: 2.3979 Loss_G: 3.0798 D(x): 0.7130 D(G(z)): 0.3795 / 0.4023\n",
"[98/100][170/391] Loss_D: 2.4317 Loss_G: 2.3185 D(x): 0.8075 D(G(z)): 0.4162 / 0.4961\n",
"[98/100][171/391] Loss_D: 3.1051 Loss_G: 2.5520 D(x): 0.6924 D(G(z)): 0.5020 / 0.4735\n",
"[98/100][172/391] Loss_D: 2.6478 Loss_G: 3.6362 D(x): 0.7552 D(G(z)): 0.4591 / 0.3350\n",
"[98/100][173/391] Loss_D: 2.9344 Loss_G: 2.7749 D(x): 0.6474 D(G(z)): 0.4296 / 0.4322\n",
"[98/100][174/391] Loss_D: 3.8227 Loss_G: 2.7671 D(x): 0.5682 D(G(z)): 0.5615 / 0.4174\n",
"[98/100][175/391] Loss_D: 2.4706 Loss_G: 2.7808 D(x): 0.6492 D(G(z)): 0.2774 / 0.4236\n",
"[98/100][176/391] Loss_D: 2.8035 Loss_G: 2.8322 D(x): 0.7020 D(G(z)): 0.4286 / 0.4169\n",
"[98/100][177/391] Loss_D: 3.0544 Loss_G: 3.0302 D(x): 0.6029 D(G(z)): 0.3853 / 0.3889\n",
"[98/100][178/391] Loss_D: 2.8634 Loss_G: 2.8449 D(x): 0.6174 D(G(z)): 0.4056 / 0.4173\n",
"[98/100][179/391] Loss_D: 2.8922 Loss_G: 3.1394 D(x): 0.6105 D(G(z)): 0.3622 / 0.3810\n",
"[98/100][180/391] Loss_D: 2.7806 Loss_G: 2.6412 D(x): 0.7542 D(G(z)): 0.4989 / 0.4538\n",
"[98/100][181/391] Loss_D: 3.6298 Loss_G: 2.3778 D(x): 0.5901 D(G(z)): 0.3731 / 0.4914\n",
"[98/100][182/391] Loss_D: 2.8202 Loss_G: 1.8700 D(x): 0.6146 D(G(z)): 0.4084 / 0.5708\n",
"[98/100][183/391] Loss_D: 2.1032 Loss_G: 2.1875 D(x): 0.8162 D(G(z)): 0.3801 / 0.5182\n",
"[98/100][184/391] Loss_D: 2.8082 Loss_G: 2.5419 D(x): 0.6545 D(G(z)): 0.4210 / 0.4548\n",
"[98/100][185/391] Loss_D: 2.6751 Loss_G: 2.5187 D(x): 0.6709 D(G(z)): 0.3877 / 0.4562\n",
"[98/100][186/391] Loss_D: 2.6416 Loss_G: 1.8461 D(x): 0.6930 D(G(z)): 0.4134 / 0.5649\n",
"[98/100][187/391] Loss_D: 2.7729 Loss_G: 3.2430 D(x): 0.6756 D(G(z)): 0.3980 / 0.3708\n",
"[98/100][188/391] Loss_D: 2.3188 Loss_G: 2.9497 D(x): 0.8033 D(G(z)): 0.5198 / 0.4200\n",
"[98/100][189/391] Loss_D: 3.0849 Loss_G: 3.6011 D(x): 0.6243 D(G(z)): 0.4432 / 0.3433\n",
"[98/100][190/391] Loss_D: 3.2072 Loss_G: 3.2326 D(x): 0.5999 D(G(z)): 0.4656 / 0.3717\n",
"[98/100][191/391] Loss_D: 2.5975 Loss_G: 2.2628 D(x): 0.7233 D(G(z)): 0.3571 / 0.5077\n",
"[98/100][192/391] Loss_D: 2.5912 Loss_G: 2.5421 D(x): 0.6398 D(G(z)): 0.3634 / 0.4709\n",
"[98/100][193/391] Loss_D: 2.4074 Loss_G: 3.2399 D(x): 0.7696 D(G(z)): 0.4085 / 0.3754\n",
"[98/100][194/391] Loss_D: 2.6142 Loss_G: 3.1659 D(x): 0.7026 D(G(z)): 0.3923 / 0.3858\n",
"[98/100][195/391] Loss_D: 3.1479 Loss_G: 3.8261 D(x): 0.6037 D(G(z)): 0.4188 / 0.3147\n",
"[98/100][196/391] Loss_D: 2.4860 Loss_G: 2.7338 D(x): 0.6730 D(G(z)): 0.3929 / 0.4235\n",
"[98/100][197/391] Loss_D: 2.7583 Loss_G: 2.5189 D(x): 0.7385 D(G(z)): 0.4160 / 0.4451\n",
"[98/100][198/391] Loss_D: 2.8314 Loss_G: 2.7823 D(x): 0.6641 D(G(z)): 0.4497 / 0.4224\n",
"[98/100][199/391] Loss_D: 2.3072 Loss_G: 1.9869 D(x): 0.7214 D(G(z)): 0.2996 / 0.5469\n",
"[98/100][200/391] Loss_D: 2.6524 Loss_G: 2.4281 D(x): 0.6559 D(G(z)): 0.3365 / 0.4859\n",
"[98/100][201/391] Loss_D: 2.6869 Loss_G: 3.6118 D(x): 0.7383 D(G(z)): 0.4215 / 0.3334\n",
"[98/100][202/391] Loss_D: 2.6473 Loss_G: 3.0547 D(x): 0.7419 D(G(z)): 0.4609 / 0.4172\n",
"[98/100][203/391] Loss_D: 2.4878 Loss_G: 3.2006 D(x): 0.7458 D(G(z)): 0.4001 / 0.3741\n",
"[98/100][204/391] Loss_D: 2.5505 Loss_G: 2.9153 D(x): 0.7106 D(G(z)): 0.4418 / 0.4092\n",
"[98/100][205/391] Loss_D: 3.5381 Loss_G: 3.5669 D(x): 0.6206 D(G(z)): 0.5818 / 0.3321\n",
"[98/100][206/391] Loss_D: 2.7775 Loss_G: 3.1120 D(x): 0.6539 D(G(z)): 0.4193 / 0.3848\n",
"[98/100][207/391] Loss_D: 2.8640 Loss_G: 3.0079 D(x): 0.6045 D(G(z)): 0.3642 / 0.4046\n",
"[98/100][208/391] Loss_D: 2.1191 Loss_G: 2.9798 D(x): 0.7180 D(G(z)): 0.2247 / 0.3997\n",
"[98/100][209/391] Loss_D: 2.2035 Loss_G: 3.0716 D(x): 0.7261 D(G(z)): 0.2920 / 0.3981\n",
"[98/100][210/391] Loss_D: 2.4900 Loss_G: 3.9353 D(x): 0.7728 D(G(z)): 0.4148 / 0.3080\n",
"[98/100][211/391] Loss_D: 3.7628 Loss_G: 3.6782 D(x): 0.5695 D(G(z)): 0.5459 / 0.3359\n",
"[98/100][212/391] Loss_D: 3.4562 Loss_G: 4.1266 D(x): 0.6090 D(G(z)): 0.5047 / 0.2961\n",
"[98/100][213/391] Loss_D: 2.9031 Loss_G: 3.1327 D(x): 0.6723 D(G(z)): 0.4342 / 0.3901\n",
"[98/100][214/391] Loss_D: 2.1880 Loss_G: 3.1920 D(x): 0.7316 D(G(z)): 0.3751 / 0.3902\n",
"[98/100][215/391] Loss_D: 3.1240 Loss_G: 2.9827 D(x): 0.6379 D(G(z)): 0.4635 / 0.4087\n",
"[98/100][216/391] Loss_D: 2.5663 Loss_G: 3.6580 D(x): 0.6884 D(G(z)): 0.3715 / 0.3469\n",
"[98/100][217/391] Loss_D: 2.7804 Loss_G: 2.8575 D(x): 0.5925 D(G(z)): 0.2831 / 0.4189\n",
"[98/100][218/391] Loss_D: 2.6086 Loss_G: 2.9015 D(x): 0.7622 D(G(z)): 0.4862 / 0.4142\n",
"[98/100][219/391] Loss_D: 2.6209 Loss_G: 3.0458 D(x): 0.6573 D(G(z)): 0.3838 / 0.4015\n",
"[98/100][220/391] Loss_D: 2.4693 Loss_G: 3.6726 D(x): 0.7113 D(G(z)): 0.3752 / 0.3340\n",
"[98/100][221/391] Loss_D: 2.9354 Loss_G: 2.8866 D(x): 0.6651 D(G(z)): 0.4396 / 0.4352\n",
"[98/100][222/391] Loss_D: 2.4909 Loss_G: 3.1515 D(x): 0.7574 D(G(z)): 0.4100 / 0.3905\n",
"[98/100][223/391] Loss_D: 2.9335 Loss_G: 3.2952 D(x): 0.6982 D(G(z)): 0.4764 / 0.3817\n",
"[98/100][224/391] Loss_D: 2.7401 Loss_G: 3.1713 D(x): 0.6725 D(G(z)): 0.4226 / 0.3730\n",
"[98/100][225/391] Loss_D: 2.9444 Loss_G: 2.4876 D(x): 0.6132 D(G(z)): 0.3850 / 0.4645\n",
"[98/100][226/391] Loss_D: 2.6924 Loss_G: 2.3277 D(x): 0.6478 D(G(z)): 0.4000 / 0.4951\n",
"[98/100][227/391] Loss_D: 2.7253 Loss_G: 3.4898 D(x): 0.6659 D(G(z)): 0.4095 / 0.3306\n",
"[98/100][228/391] Loss_D: 2.7127 Loss_G: 3.4910 D(x): 0.6306 D(G(z)): 0.3100 / 0.3521\n",
"[98/100][229/391] Loss_D: 3.1118 Loss_G: 2.7761 D(x): 0.6912 D(G(z)): 0.5204 / 0.4407\n",
"[98/100][230/391] Loss_D: 2.7599 Loss_G: 3.3604 D(x): 0.6866 D(G(z)): 0.4916 / 0.3734\n",
"[98/100][231/391] Loss_D: 2.8535 Loss_G: 2.1177 D(x): 0.6958 D(G(z)): 0.4663 / 0.5257\n",
"[98/100][232/391] Loss_D: 2.7001 Loss_G: 3.3451 D(x): 0.6920 D(G(z)): 0.4241 / 0.3704\n",
"[98/100][233/391] Loss_D: 2.2295 Loss_G: 3.0190 D(x): 0.7224 D(G(z)): 0.2784 / 0.4070\n",
"[98/100][234/391] Loss_D: 2.6155 Loss_G: 3.2699 D(x): 0.6381 D(G(z)): 0.4050 / 0.3786\n",
"[98/100][235/391] Loss_D: 2.4929 Loss_G: 2.3852 D(x): 0.6867 D(G(z)): 0.3378 / 0.4838\n",
"[98/100][236/391] Loss_D: 2.4175 Loss_G: 3.5245 D(x): 0.7525 D(G(z)): 0.3438 / 0.3420\n",
"[98/100][237/391] Loss_D: 2.7517 Loss_G: 4.2875 D(x): 0.7607 D(G(z)): 0.4866 / 0.2692\n",
"[98/100][238/391] Loss_D: 2.7398 Loss_G: 3.3303 D(x): 0.6728 D(G(z)): 0.4292 / 0.3600\n",
"[98/100][239/391] Loss_D: 2.2831 Loss_G: 3.0645 D(x): 0.7269 D(G(z)): 0.2951 / 0.3929\n",
"[98/100][240/391] Loss_D: 2.8122 Loss_G: 2.8945 D(x): 0.7364 D(G(z)): 0.4887 / 0.4185\n",
"[98/100][241/391] Loss_D: 3.7289 Loss_G: 3.7911 D(x): 0.6305 D(G(z)): 0.4635 / 0.3166\n",
"[98/100][242/391] Loss_D: 2.4919 Loss_G: 2.6376 D(x): 0.6969 D(G(z)): 0.3923 / 0.4414\n",
"[98/100][243/391] Loss_D: 2.7684 Loss_G: 3.4780 D(x): 0.6822 D(G(z)): 0.4509 / 0.3535\n",
"[98/100][244/391] Loss_D: 2.7758 Loss_G: 2.3411 D(x): 0.6791 D(G(z)): 0.4524 / 0.4764\n",
"[98/100][245/391] Loss_D: 2.5665 Loss_G: 3.4453 D(x): 0.6581 D(G(z)): 0.3414 / 0.3485\n",
"[98/100][246/391] Loss_D: 2.4923 Loss_G: 2.5330 D(x): 0.7269 D(G(z)): 0.3562 / 0.4469\n",
"[98/100][247/391] Loss_D: 2.9756 Loss_G: 2.2501 D(x): 0.6660 D(G(z)): 0.4325 / 0.4920\n",
"[98/100][248/391] Loss_D: 2.5384 Loss_G: 2.5714 D(x): 0.6824 D(G(z)): 0.3909 / 0.4601\n",
"[98/100][249/391] Loss_D: 3.5435 Loss_G: 2.6650 D(x): 0.5077 D(G(z)): 0.4340 / 0.4545\n",
"[98/100][250/391] Loss_D: 2.9278 Loss_G: 2.4290 D(x): 0.6186 D(G(z)): 0.3571 / 0.4760\n",
"[98/100][251/391] Loss_D: 2.8766 Loss_G: 2.9154 D(x): 0.7094 D(G(z)): 0.4711 / 0.4045\n",
"[98/100][252/391] Loss_D: 3.2305 Loss_G: 2.1595 D(x): 0.6065 D(G(z)): 0.4489 / 0.5384\n",
"[98/100][253/391] Loss_D: 2.4592 Loss_G: 2.1197 D(x): 0.7620 D(G(z)): 0.3957 / 0.5321\n",
"[98/100][254/391] Loss_D: 3.0209 Loss_G: 3.0649 D(x): 0.7865 D(G(z)): 0.6011 / 0.3999\n",
"[98/100][255/391] Loss_D: 2.6149 Loss_G: 2.4378 D(x): 0.7166 D(G(z)): 0.3674 / 0.4632\n",
"[98/100][256/391] Loss_D: 2.7435 Loss_G: 3.0652 D(x): 0.5829 D(G(z)): 0.2916 / 0.3947\n",
"[98/100][257/391] Loss_D: 2.9046 Loss_G: 2.8325 D(x): 0.6433 D(G(z)): 0.3096 / 0.4366\n",
"[98/100][258/391] Loss_D: 3.0125 Loss_G: 1.9964 D(x): 0.6550 D(G(z)): 0.4855 / 0.5278\n",
"[98/100][259/391] Loss_D: 2.6800 Loss_G: 3.3333 D(x): 0.7129 D(G(z)): 0.4353 / 0.3650\n",
"[98/100][260/391] Loss_D: 3.6008 Loss_G: 2.7544 D(x): 0.6062 D(G(z)): 0.5553 / 0.4357\n",
"[98/100][261/391] Loss_D: 2.9598 Loss_G: 3.6698 D(x): 0.7598 D(G(z)): 0.4953 / 0.3276\n",
"[98/100][262/391] Loss_D: 2.9192 Loss_G: 3.3492 D(x): 0.6423 D(G(z)): 0.3757 / 0.3672\n",
"[98/100][263/391] Loss_D: 2.8110 Loss_G: 3.0343 D(x): 0.6676 D(G(z)): 0.3394 / 0.4096\n",
"[98/100][264/391] Loss_D: 2.6431 Loss_G: 2.8962 D(x): 0.6977 D(G(z)): 0.4767 / 0.4301\n",
"[98/100][265/391] Loss_D: 3.1845 Loss_G: 3.2874 D(x): 0.7141 D(G(z)): 0.5409 / 0.3634\n",
"[98/100][266/391] Loss_D: 2.7234 Loss_G: 3.2498 D(x): 0.6976 D(G(z)): 0.3590 / 0.3607\n",
"[98/100][267/391] Loss_D: 2.5158 Loss_G: 2.7341 D(x): 0.6936 D(G(z)): 0.3525 / 0.4401\n",
"[98/100][268/391] Loss_D: 3.1471 Loss_G: 3.5189 D(x): 0.5117 D(G(z)): 0.2387 / 0.3374\n",
"[98/100][269/391] Loss_D: 2.6680 Loss_G: 2.9634 D(x): 0.6645 D(G(z)): 0.3686 / 0.3937\n",
"[98/100][270/391] Loss_D: 2.5941 Loss_G: 2.3252 D(x): 0.7511 D(G(z)): 0.4136 / 0.4755\n",
"[98/100][271/391] Loss_D: 3.9009 Loss_G: 2.9772 D(x): 0.7746 D(G(z)): 0.4293 / 0.3896\n",
"[98/100][272/391] Loss_D: 3.8808 Loss_G: 3.4800 D(x): 0.6811 D(G(z)): 0.6242 / 0.3573\n",
"[98/100][273/391] Loss_D: 2.5551 Loss_G: 2.6298 D(x): 0.7137 D(G(z)): 0.3876 / 0.4620\n",
"[98/100][274/391] Loss_D: 2.3150 Loss_G: 3.6972 D(x): 0.7234 D(G(z)): 0.3815 / 0.3269\n",
"[98/100][275/391] Loss_D: 3.0557 Loss_G: 3.3761 D(x): 0.5768 D(G(z)): 0.3590 / 0.3649\n",
"[98/100][276/391] Loss_D: 2.9395 Loss_G: 3.1630 D(x): 0.6546 D(G(z)): 0.4088 / 0.3914\n",
"[98/100][277/391] Loss_D: 3.2263 Loss_G: 2.3537 D(x): 0.6352 D(G(z)): 0.4796 / 0.4734\n",
"[98/100][278/391] Loss_D: 2.7592 Loss_G: 3.0188 D(x): 0.6266 D(G(z)): 0.3935 / 0.4074\n",
"[98/100][279/391] Loss_D: 2.0428 Loss_G: 2.0082 D(x): 0.8082 D(G(z)): 0.2999 / 0.5392\n",
"[98/100][280/391] Loss_D: 3.0497 Loss_G: 3.0542 D(x): 0.6257 D(G(z)): 0.4395 / 0.3848\n",
"[98/100][281/391] Loss_D: 2.9251 Loss_G: 2.6543 D(x): 0.7774 D(G(z)): 0.5194 / 0.4460\n",
"[98/100][282/391] Loss_D: 3.4077 Loss_G: 4.2003 D(x): 0.7537 D(G(z)): 0.5962 / 0.2899\n",
"[98/100][283/391] Loss_D: 2.7863 Loss_G: 3.9271 D(x): 0.6999 D(G(z)): 0.3895 / 0.3035\n",
"[98/100][284/391] Loss_D: 2.9830 Loss_G: 3.4992 D(x): 0.5646 D(G(z)): 0.3391 / 0.3560\n",
"[98/100][285/391] Loss_D: 2.9986 Loss_G: 3.0431 D(x): 0.5451 D(G(z)): 0.2836 / 0.4015\n",
"[98/100][286/391] Loss_D: 2.8817 Loss_G: 1.6805 D(x): 0.5998 D(G(z)): 0.3565 / 0.5981\n",
"[98/100][287/391] Loss_D: 3.8189 Loss_G: 3.6675 D(x): 0.7196 D(G(z)): 0.6493 / 0.3300\n",
"[98/100][288/391] Loss_D: 2.5223 Loss_G: 4.3046 D(x): 0.6872 D(G(z)): 0.4210 / 0.2750\n",
"[98/100][289/391] Loss_D: 2.5867 Loss_G: 2.8083 D(x): 0.7093 D(G(z)): 0.3975 / 0.4268\n",
"[98/100][290/391] Loss_D: 2.7816 Loss_G: 3.6113 D(x): 0.6690 D(G(z)): 0.3850 / 0.3264\n",
"[98/100][291/391] Loss_D: 2.7120 Loss_G: 2.4365 D(x): 0.6784 D(G(z)): 0.3716 / 0.4699\n",
"[98/100][292/391] Loss_D: 2.8635 Loss_G: 3.2421 D(x): 0.7315 D(G(z)): 0.4607 / 0.3881\n",
"[98/100][293/391] Loss_D: 2.7293 Loss_G: 2.9982 D(x): 0.7284 D(G(z)): 0.4753 / 0.3993\n",
"[98/100][294/391] Loss_D: 2.4101 Loss_G: 3.3359 D(x): 0.7585 D(G(z)): 0.4311 / 0.3587\n",
"[98/100][295/391] Loss_D: 2.5474 Loss_G: 4.6606 D(x): 0.6733 D(G(z)): 0.3180 / 0.2413\n",
"[98/100][296/391] Loss_D: 2.5071 Loss_G: 2.7148 D(x): 0.6763 D(G(z)): 0.3203 / 0.4343\n",
"[98/100][297/391] Loss_D: 3.1151 Loss_G: 3.6963 D(x): 0.6738 D(G(z)): 0.4511 / 0.3203\n",
"[98/100][298/391] Loss_D: 2.8539 Loss_G: 2.7127 D(x): 0.6106 D(G(z)): 0.3725 / 0.4431\n",
"[98/100][299/391] Loss_D: 2.8631 Loss_G: 2.4120 D(x): 0.6915 D(G(z)): 0.4658 / 0.4903\n",
"[98/100][300/391] Loss_D: 2.6756 Loss_G: 2.8107 D(x): 0.6819 D(G(z)): 0.3522 / 0.4422\n",
"[98/100][301/391] Loss_D: 3.5926 Loss_G: 3.1550 D(x): 0.7212 D(G(z)): 0.4724 / 0.3902\n",
"[98/100][302/391] Loss_D: 2.7265 Loss_G: 3.0481 D(x): 0.6476 D(G(z)): 0.3806 / 0.4031\n",
"[98/100][303/391] Loss_D: 2.5328 Loss_G: 2.7355 D(x): 0.7806 D(G(z)): 0.4512 / 0.4471\n",
"[98/100][304/391] Loss_D: 2.1647 Loss_G: 3.7948 D(x): 0.6945 D(G(z)): 0.3566 / 0.3227\n",
"[98/100][305/391] Loss_D: 2.5236 Loss_G: 3.0938 D(x): 0.6378 D(G(z)): 0.2974 / 0.3972\n",
"[98/100][306/391] Loss_D: 2.6962 Loss_G: 2.7647 D(x): 0.6565 D(G(z)): 0.3795 / 0.4201\n",
"[98/100][307/391] Loss_D: 2.4601 Loss_G: 2.2458 D(x): 0.7166 D(G(z)): 0.3297 / 0.4891\n",
"[98/100][308/391] Loss_D: 2.3856 Loss_G: 2.0294 D(x): 0.7283 D(G(z)): 0.4244 / 0.5410\n",
"[98/100][309/391] Loss_D: 2.5814 Loss_G: 2.7315 D(x): 0.6685 D(G(z)): 0.3688 / 0.4440\n",
"[98/100][310/391] Loss_D: 2.9790 Loss_G: 2.0162 D(x): 0.6251 D(G(z)): 0.3659 / 0.5401\n",
"[98/100][311/391] Loss_D: 3.1597 Loss_G: 2.4697 D(x): 0.7499 D(G(z)): 0.5212 / 0.4805\n",
"[98/100][312/391] Loss_D: 2.4398 Loss_G: 2.7242 D(x): 0.7350 D(G(z)): 0.3849 / 0.4246\n",
"[98/100][313/391] Loss_D: 2.3054 Loss_G: 3.0493 D(x): 0.7562 D(G(z)): 0.3369 / 0.3892\n",
"[98/100][314/391] Loss_D: 2.6350 Loss_G: 3.6219 D(x): 0.6897 D(G(z)): 0.4635 / 0.3263\n",
"[98/100][315/391] Loss_D: 2.9276 Loss_G: 2.4371 D(x): 0.6962 D(G(z)): 0.4828 / 0.4620\n",
"[98/100][316/391] Loss_D: 2.9604 Loss_G: 3.9743 D(x): 0.5937 D(G(z)): 0.3361 / 0.3123\n",
"[98/100][317/391] Loss_D: 3.0216 Loss_G: 3.0747 D(x): 0.6228 D(G(z)): 0.4223 / 0.4026\n",
"[98/100][318/391] Loss_D: 2.8033 Loss_G: 2.6607 D(x): 0.6496 D(G(z)): 0.4198 / 0.4542\n",
"[98/100][319/391] Loss_D: 3.0546 Loss_G: 3.0415 D(x): 0.6943 D(G(z)): 0.4996 / 0.4031\n",
"[98/100][320/391] Loss_D: 3.8226 Loss_G: 2.0641 D(x): 0.5274 D(G(z)): 0.4854 / 0.5421\n",
"[98/100][321/391] Loss_D: 3.2693 Loss_G: 3.3443 D(x): 0.7421 D(G(z)): 0.5552 / 0.3628\n",
"[98/100][322/391] Loss_D: 2.4256 Loss_G: 2.5176 D(x): 0.6864 D(G(z)): 0.3636 / 0.4840\n",
"[98/100][323/391] Loss_D: 2.7555 Loss_G: 3.2686 D(x): 0.6532 D(G(z)): 0.3427 / 0.3811\n",
"[98/100][324/391] Loss_D: 2.2994 Loss_G: 2.3787 D(x): 0.7268 D(G(z)): 0.3776 / 0.4858\n",
"[98/100][325/391] Loss_D: 2.8422 Loss_G: 2.7106 D(x): 0.7347 D(G(z)): 0.4614 / 0.4271\n",
"[98/100][326/391] Loss_D: 2.3307 Loss_G: 2.6810 D(x): 0.7944 D(G(z)): 0.3532 / 0.4475\n",
"[98/100][327/391] Loss_D: 3.0603 Loss_G: 2.6686 D(x): 0.5800 D(G(z)): 0.4007 / 0.4574\n",
"[98/100][328/391] Loss_D: 2.3427 Loss_G: 2.5447 D(x): 0.7280 D(G(z)): 0.4179 / 0.4530\n",
"[98/100][329/391] Loss_D: 3.1041 Loss_G: 2.4885 D(x): 0.5675 D(G(z)): 0.3668 / 0.4755\n",
"[98/100][330/391] Loss_D: 2.9534 Loss_G: 2.4430 D(x): 0.7038 D(G(z)): 0.4746 / 0.4753\n",
"[98/100][331/391] Loss_D: 3.5327 Loss_G: 2.3218 D(x): 0.7355 D(G(z)): 0.3989 / 0.4942\n",
"[98/100][332/391] Loss_D: 2.7523 Loss_G: 3.2930 D(x): 0.7017 D(G(z)): 0.4551 / 0.3710\n",
"[98/100][333/391] Loss_D: 2.8976 Loss_G: 3.6525 D(x): 0.7035 D(G(z)): 0.4171 / 0.3150\n",
"[98/100][334/391] Loss_D: 2.0573 Loss_G: 2.8539 D(x): 0.7138 D(G(z)): 0.2829 / 0.4173\n",
"[98/100][335/391] Loss_D: 2.8996 Loss_G: 3.6559 D(x): 0.6787 D(G(z)): 0.4623 / 0.3200\n",
"[98/100][336/391] Loss_D: 2.7410 Loss_G: 3.5226 D(x): 0.7194 D(G(z)): 0.3980 / 0.3317\n",
"[98/100][337/391] Loss_D: 3.2787 Loss_G: 4.4702 D(x): 0.5946 D(G(z)): 0.4424 / 0.2499\n",
"[98/100][338/391] Loss_D: 3.0076 Loss_G: 3.0810 D(x): 0.5866 D(G(z)): 0.3493 / 0.3829\n",
"[98/100][339/391] Loss_D: 2.7340 Loss_G: 2.4497 D(x): 0.6742 D(G(z)): 0.3931 / 0.4661\n",
"[98/100][340/391] Loss_D: 3.2991 Loss_G: 2.4925 D(x): 0.6449 D(G(z)): 0.5275 / 0.4718\n",
"[98/100][341/391] Loss_D: 3.1432 Loss_G: 2.0648 D(x): 0.7037 D(G(z)): 0.5034 / 0.5411\n",
"[98/100][342/391] Loss_D: 2.9491 Loss_G: 2.4435 D(x): 0.6819 D(G(z)): 0.4553 / 0.4764\n",
"[98/100][343/391] Loss_D: 2.5866 Loss_G: 2.2038 D(x): 0.7705 D(G(z)): 0.4316 / 0.5086\n",
"[98/100][344/391] Loss_D: 2.1863 Loss_G: 3.0276 D(x): 0.7169 D(G(z)): 0.3202 / 0.4087\n",
"[98/100][345/391] Loss_D: 2.5463 Loss_G: 3.8276 D(x): 0.7393 D(G(z)): 0.3891 / 0.3079\n",
"[98/100][346/391] Loss_D: 2.7225 Loss_G: 2.9627 D(x): 0.5798 D(G(z)): 0.3153 / 0.4081\n",
"[98/100][347/391] Loss_D: 2.6688 Loss_G: 3.3193 D(x): 0.6612 D(G(z)): 0.3213 / 0.3568\n",
"[98/100][348/391] Loss_D: 3.1965 Loss_G: 2.7411 D(x): 0.5931 D(G(z)): 0.4507 / 0.4306\n",
"[98/100][349/391] Loss_D: 3.2200 Loss_G: 2.8298 D(x): 0.6234 D(G(z)): 0.4960 / 0.4117\n",
"[98/100][350/391] Loss_D: 2.8373 Loss_G: 2.7524 D(x): 0.6464 D(G(z)): 0.4433 / 0.4312\n",
"[98/100][351/391] Loss_D: 3.0366 Loss_G: 2.3429 D(x): 0.6770 D(G(z)): 0.4890 / 0.4845\n",
"[98/100][352/391] Loss_D: 2.8532 Loss_G: 2.7003 D(x): 0.7573 D(G(z)): 0.5218 / 0.4432\n",
"[98/100][353/391] Loss_D: 2.6833 Loss_G: 2.3871 D(x): 0.7094 D(G(z)): 0.4493 / 0.4857\n",
"[98/100][354/391] Loss_D: 2.2000 Loss_G: 2.4581 D(x): 0.6778 D(G(z)): 0.2696 / 0.4634\n",
"[98/100][355/391] Loss_D: 2.5551 Loss_G: 2.7828 D(x): 0.6766 D(G(z)): 0.3957 / 0.4234\n",
"[98/100][356/391] Loss_D: 2.6741 Loss_G: 3.6246 D(x): 0.6879 D(G(z)): 0.4064 / 0.3278\n",
"[98/100][357/391] Loss_D: 3.8412 Loss_G: 2.3448 D(x): 0.5382 D(G(z)): 0.5528 / 0.4670\n",
"[98/100][358/391] Loss_D: 2.6766 Loss_G: 2.6616 D(x): 0.6298 D(G(z)): 0.3443 / 0.4423\n",
"[98/100][359/391] Loss_D: 2.9525 Loss_G: 1.9672 D(x): 0.6696 D(G(z)): 0.4633 / 0.5464\n",
"[98/100][360/391] Loss_D: 2.9546 Loss_G: 4.0413 D(x): 0.6150 D(G(z)): 0.3972 / 0.3085\n",
"[98/100][361/391] Loss_D: 3.4420 Loss_G: 2.8434 D(x): 0.6190 D(G(z)): 0.4185 / 0.4202\n",
"[98/100][362/391] Loss_D: 2.8960 Loss_G: 2.4821 D(x): 0.7370 D(G(z)): 0.4829 / 0.4731\n",
"[98/100][363/391] Loss_D: 2.6776 Loss_G: 3.8641 D(x): 0.7467 D(G(z)): 0.4511 / 0.2921\n",
"[98/100][364/391] Loss_D: 3.0224 Loss_G: 3.6093 D(x): 0.6609 D(G(z)): 0.4280 / 0.3286\n",
"[98/100][365/391] Loss_D: 2.6774 Loss_G: 3.1413 D(x): 0.7084 D(G(z)): 0.4529 / 0.3834\n",
"[98/100][366/391] Loss_D: 2.2799 Loss_G: 3.5745 D(x): 0.8311 D(G(z)): 0.3882 / 0.3374\n",
"[98/100][367/391] Loss_D: 2.9048 Loss_G: 2.8985 D(x): 0.6376 D(G(z)): 0.3912 / 0.4083\n",
"[98/100][368/391] Loss_D: 3.0096 Loss_G: 2.8721 D(x): 0.5881 D(G(z)): 0.3281 / 0.4104\n",
"[98/100][369/391] Loss_D: 2.5299 Loss_G: 2.2507 D(x): 0.6172 D(G(z)): 0.3434 / 0.4938\n",
"[98/100][370/391] Loss_D: 2.8767 Loss_G: 2.9335 D(x): 0.5832 D(G(z)): 0.3580 / 0.4092\n",
"[98/100][371/391] Loss_D: 2.4408 Loss_G: 2.8033 D(x): 0.7388 D(G(z)): 0.3730 / 0.4398\n",
"[98/100][372/391] Loss_D: 2.6686 Loss_G: 2.5210 D(x): 0.7548 D(G(z)): 0.4809 / 0.4518\n",
"[98/100][373/391] Loss_D: 2.4076 Loss_G: 2.4399 D(x): 0.7702 D(G(z)): 0.3516 / 0.4689\n",
"[98/100][374/391] Loss_D: 2.5164 Loss_G: 3.1189 D(x): 0.7404 D(G(z)): 0.4374 / 0.3868\n",
"[98/100][375/391] Loss_D: 2.6055 Loss_G: 2.3458 D(x): 0.7865 D(G(z)): 0.4329 / 0.4999\n",
"[98/100][376/391] Loss_D: 3.1549 Loss_G: 3.3017 D(x): 0.6518 D(G(z)): 0.4548 / 0.3670\n",
"[98/100][377/391] Loss_D: 3.1612 Loss_G: 2.4417 D(x): 0.5637 D(G(z)): 0.3622 / 0.4708\n",
"[98/100][378/391] Loss_D: 2.9549 Loss_G: 3.3878 D(x): 0.5933 D(G(z)): 0.4360 / 0.3527\n",
"[98/100][379/391] Loss_D: 2.5309 Loss_G: 3.7687 D(x): 0.6856 D(G(z)): 0.3775 / 0.3163\n",
"[98/100][380/391] Loss_D: 3.1685 Loss_G: 2.6085 D(x): 0.5791 D(G(z)): 0.3908 / 0.4614\n",
"[98/100][381/391] Loss_D: 3.3316 Loss_G: 2.9919 D(x): 0.7736 D(G(z)): 0.5951 / 0.4173\n",
"[98/100][382/391] Loss_D: 2.2457 Loss_G: 2.8173 D(x): 0.7456 D(G(z)): 0.3593 / 0.4345\n",
"[98/100][383/391] Loss_D: 2.4347 Loss_G: 3.1010 D(x): 0.7328 D(G(z)): 0.3558 / 0.3989\n",
"[98/100][384/391] Loss_D: 2.9414 Loss_G: 2.7606 D(x): 0.6264 D(G(z)): 0.4710 / 0.4296\n",
"[98/100][385/391] Loss_D: 3.2760 Loss_G: 2.8010 D(x): 0.5857 D(G(z)): 0.4949 / 0.4117\n",
"[98/100][386/391] Loss_D: 2.7824 Loss_G: 2.5833 D(x): 0.6564 D(G(z)): 0.3682 / 0.4576\n",
"[98/100][387/391] Loss_D: 2.8612 Loss_G: 2.5804 D(x): 0.6823 D(G(z)): 0.4268 / 0.4542\n",
"[98/100][388/391] Loss_D: 2.4138 Loss_G: 2.6109 D(x): 0.6706 D(G(z)): 0.3412 / 0.4595\n",
"[98/100][389/391] Loss_D: 2.7771 Loss_G: 3.3696 D(x): 0.6364 D(G(z)): 0.3243 / 0.3655\n",
"[98/100][390/391] Loss_D: 2.7610 Loss_G: 3.1125 D(x): 0.7007 D(G(z)): 0.4496 / 0.3938\n",
"[98/100][391/391] Loss_D: 3.6624 Loss_G: 3.5019 D(x): 0.7463 D(G(z)): 0.4172 / 0.3589\n",
"[99/100][1/391] Loss_D: 3.5898 Loss_G: 2.3782 D(x): 0.7127 D(G(z)): 0.4631 / 0.4666\n",
"[99/100][2/391] Loss_D: 3.0519 Loss_G: 2.6274 D(x): 0.6260 D(G(z)): 0.4533 / 0.4575\n",
"[99/100][3/391] Loss_D: 3.0385 Loss_G: 2.8066 D(x): 0.6278 D(G(z)): 0.4474 / 0.4207\n",
"[99/100][4/391] Loss_D: 2.8373 Loss_G: 4.7295 D(x): 0.6747 D(G(z)): 0.4801 / 0.2386\n",
"[99/100][5/391] Loss_D: 2.7494 Loss_G: 2.7489 D(x): 0.6291 D(G(z)): 0.3422 / 0.4257\n",
"[99/100][6/391] Loss_D: 2.8193 Loss_G: 2.7342 D(x): 0.6587 D(G(z)): 0.3867 / 0.4221\n",
"[99/100][7/391] Loss_D: 3.0042 Loss_G: 3.1630 D(x): 0.6694 D(G(z)): 0.4099 / 0.3629\n",
"[99/100][8/391] Loss_D: 2.5558 Loss_G: 2.9061 D(x): 0.6431 D(G(z)): 0.3869 / 0.4067\n",
"[99/100][9/391] Loss_D: 2.3785 Loss_G: 3.0807 D(x): 0.7449 D(G(z)): 0.3966 / 0.3777\n",
"[99/100][10/391] Loss_D: 3.0481 Loss_G: 2.4939 D(x): 0.6700 D(G(z)): 0.5105 / 0.4609\n",
"[99/100][11/391] Loss_D: 3.1229 Loss_G: 3.1568 D(x): 0.6264 D(G(z)): 0.4208 / 0.3647\n",
"[99/100][12/391] Loss_D: 2.4161 Loss_G: 2.4569 D(x): 0.6906 D(G(z)): 0.3307 / 0.4757\n",
"[99/100][13/391] Loss_D: 2.5600 Loss_G: 2.6516 D(x): 0.6392 D(G(z)): 0.3023 / 0.4483\n",
"[99/100][14/391] Loss_D: 2.6252 Loss_G: 2.9502 D(x): 0.6667 D(G(z)): 0.4129 / 0.3958\n",
"[99/100][15/391] Loss_D: 2.5065 Loss_G: 2.4547 D(x): 0.7336 D(G(z)): 0.4227 / 0.4702\n",
"[99/100][16/391] Loss_D: 2.7550 Loss_G: 2.7071 D(x): 0.6855 D(G(z)): 0.3874 / 0.4290\n",
"[99/100][17/391] Loss_D: 2.5971 Loss_G: 2.2945 D(x): 0.6660 D(G(z)): 0.3580 / 0.4766\n",
"[99/100][18/391] Loss_D: 2.8176 Loss_G: 3.1266 D(x): 0.7037 D(G(z)): 0.4789 / 0.3933\n",
"[99/100][19/391] Loss_D: 2.1901 Loss_G: 1.5719 D(x): 0.7741 D(G(z)): 0.3549 / 0.6121\n",
"[99/100][20/391] Loss_D: 3.1837 Loss_G: 2.1380 D(x): 0.6919 D(G(z)): 0.5501 / 0.5294\n",
"[99/100][21/391] Loss_D: 2.8585 Loss_G: 3.4562 D(x): 0.5891 D(G(z)): 0.4279 / 0.3562\n",
"[99/100][22/391] Loss_D: 2.6922 Loss_G: 3.5382 D(x): 0.7217 D(G(z)): 0.4275 / 0.3366\n",
"[99/100][23/391] Loss_D: 2.8060 Loss_G: 3.0473 D(x): 0.6282 D(G(z)): 0.3604 / 0.3855\n",
"[99/100][24/391] Loss_D: 2.7179 Loss_G: 2.0565 D(x): 0.6246 D(G(z)): 0.3124 / 0.5226\n",
"[99/100][25/391] Loss_D: 2.5869 Loss_G: 2.7157 D(x): 0.7484 D(G(z)): 0.3896 / 0.4432\n",
"[99/100][26/391] Loss_D: 2.9691 Loss_G: 2.7902 D(x): 0.6447 D(G(z)): 0.4319 / 0.4316\n",
"[99/100][27/391] Loss_D: 2.7017 Loss_G: 2.7996 D(x): 0.7452 D(G(z)): 0.3845 / 0.4266\n",
"[99/100][28/391] Loss_D: 2.1741 Loss_G: 3.6760 D(x): 0.7243 D(G(z)): 0.3440 / 0.3413\n",
"[99/100][29/391] Loss_D: 2.3017 Loss_G: 2.1067 D(x): 0.7006 D(G(z)): 0.2385 / 0.5434\n",
"[99/100][30/391] Loss_D: 2.4180 Loss_G: 1.9418 D(x): 0.7311 D(G(z)): 0.4305 / 0.5326\n",
"[99/100][31/391] Loss_D: 3.6875 Loss_G: 1.3986 D(x): 0.6630 D(G(z)): 0.3498 / 0.6750\n",
"[99/100][32/391] Loss_D: 2.6201 Loss_G: 2.9752 D(x): 0.7483 D(G(z)): 0.4190 / 0.3965\n",
"[99/100][33/391] Loss_D: 2.4741 Loss_G: 2.3989 D(x): 0.7911 D(G(z)): 0.4111 / 0.5040\n",
"[99/100][34/391] Loss_D: 3.5322 Loss_G: 2.4391 D(x): 0.5869 D(G(z)): 0.5223 / 0.4759\n",
"[99/100][35/391] Loss_D: 2.6047 Loss_G: 2.4308 D(x): 0.7515 D(G(z)): 0.4373 / 0.4807\n",
"[99/100][36/391] Loss_D: 2.7547 Loss_G: 2.0225 D(x): 0.6680 D(G(z)): 0.3715 / 0.5270\n",
"[99/100][37/391] Loss_D: 2.6509 Loss_G: 2.6455 D(x): 0.6769 D(G(z)): 0.4030 / 0.4392\n",
"[99/100][38/391] Loss_D: 2.9133 Loss_G: 2.5660 D(x): 0.5714 D(G(z)): 0.3222 / 0.4502\n",
"[99/100][39/391] Loss_D: 2.8620 Loss_G: 1.8840 D(x): 0.6581 D(G(z)): 0.3760 / 0.5760\n",
"[99/100][40/391] Loss_D: 2.8944 Loss_G: 1.7934 D(x): 0.7482 D(G(z)): 0.4714 / 0.5865\n",
"[99/100][41/391] Loss_D: 3.1545 Loss_G: 2.8119 D(x): 0.7851 D(G(z)): 0.5518 / 0.4192\n",
"[99/100][42/391] Loss_D: 2.2679 Loss_G: 2.6903 D(x): 0.7987 D(G(z)): 0.3615 / 0.4371\n",
"[99/100][43/391] Loss_D: 2.7389 Loss_G: 2.6425 D(x): 0.6733 D(G(z)): 0.4060 / 0.4567\n",
"[99/100][44/391] Loss_D: 3.4094 Loss_G: 3.3574 D(x): 0.5339 D(G(z)): 0.4527 / 0.3538\n",
"[99/100][45/391] Loss_D: 2.7467 Loss_G: 2.6304 D(x): 0.6614 D(G(z)): 0.3553 / 0.4682\n",
"[99/100][46/391] Loss_D: 2.6509 Loss_G: 2.6596 D(x): 0.7602 D(G(z)): 0.4226 / 0.4417\n",
"[99/100][47/391] Loss_D: 2.8212 Loss_G: 2.2569 D(x): 0.6679 D(G(z)): 0.4082 / 0.5062\n",
"[99/100][48/391] Loss_D: 2.9180 Loss_G: 3.6512 D(x): 0.6403 D(G(z)): 0.4295 / 0.3326\n",
"[99/100][49/391] Loss_D: 2.8712 Loss_G: 2.9161 D(x): 0.6486 D(G(z)): 0.4492 / 0.4143\n",
"[99/100][50/391] Loss_D: 2.7722 Loss_G: 2.5681 D(x): 0.6753 D(G(z)): 0.4414 / 0.4685\n",
"[99/100][51/391] Loss_D: 2.7628 Loss_G: 3.0914 D(x): 0.7278 D(G(z)): 0.4631 / 0.3864\n",
"[99/100][52/391] Loss_D: 3.1551 Loss_G: 2.3768 D(x): 0.6236 D(G(z)): 0.4805 / 0.5136\n",
"[99/100][53/391] Loss_D: 2.5673 Loss_G: 3.1238 D(x): 0.7194 D(G(z)): 0.4098 / 0.3799\n",
"[99/100][54/391] Loss_D: 2.6948 Loss_G: 3.3560 D(x): 0.6420 D(G(z)): 0.3921 / 0.3556\n",
"[99/100][55/391] Loss_D: 2.6280 Loss_G: 3.2181 D(x): 0.6488 D(G(z)): 0.3712 / 0.3750\n",
"[99/100][56/391] Loss_D: 2.6084 Loss_G: 2.6319 D(x): 0.7465 D(G(z)): 0.4042 / 0.4500\n",
"[99/100][57/391] Loss_D: 2.6500 Loss_G: 2.6118 D(x): 0.7691 D(G(z)): 0.4524 / 0.4480\n",
"[99/100][58/391] Loss_D: 2.5400 Loss_G: 2.7279 D(x): 0.6794 D(G(z)): 0.4214 / 0.4320\n",
"[99/100][59/391] Loss_D: 2.8422 Loss_G: 2.8831 D(x): 0.6296 D(G(z)): 0.4040 / 0.4136\n",
"[99/100][60/391] Loss_D: 2.6892 Loss_G: 3.1803 D(x): 0.7172 D(G(z)): 0.4272 / 0.3821\n",
"[99/100][61/391] Loss_D: 3.6609 Loss_G: 2.0510 D(x): 0.6289 D(G(z)): 0.4788 / 0.5304\n",
"[99/100][62/391] Loss_D: 2.7185 Loss_G: 2.6057 D(x): 0.6819 D(G(z)): 0.4137 / 0.4564\n",
"[99/100][63/391] Loss_D: 2.5687 Loss_G: 3.4777 D(x): 0.7885 D(G(z)): 0.4356 / 0.3432\n",
"[99/100][64/391] Loss_D: 2.6518 Loss_G: 2.4338 D(x): 0.7509 D(G(z)): 0.4948 / 0.4826\n",
"[99/100][65/391] Loss_D: 2.8911 Loss_G: 2.7159 D(x): 0.5884 D(G(z)): 0.3476 / 0.4512\n",
"[99/100][66/391] Loss_D: 2.9244 Loss_G: 3.8096 D(x): 0.6823 D(G(z)): 0.4686 / 0.3223\n",
"[99/100][67/391] Loss_D: 3.0366 Loss_G: 3.5900 D(x): 0.6609 D(G(z)): 0.4036 / 0.3332\n",
"[99/100][68/391] Loss_D: 2.7732 Loss_G: 3.8348 D(x): 0.6380 D(G(z)): 0.4393 / 0.3223\n",
"[99/100][69/391] Loss_D: 2.4622 Loss_G: 3.9784 D(x): 0.7074 D(G(z)): 0.3419 / 0.3084\n",
"[99/100][70/391] Loss_D: 2.6588 Loss_G: 2.6737 D(x): 0.6650 D(G(z)): 0.3440 / 0.4406\n",
"[99/100][71/391] Loss_D: 2.6192 Loss_G: 4.0148 D(x): 0.7135 D(G(z)): 0.3634 / 0.3154\n",
"[99/100][72/391] Loss_D: 2.5593 Loss_G: 3.4032 D(x): 0.6989 D(G(z)): 0.4047 / 0.3502\n",
"[99/100][73/391] Loss_D: 2.3398 Loss_G: 3.2557 D(x): 0.6992 D(G(z)): 0.2806 / 0.3670\n",
"[99/100][74/391] Loss_D: 2.6803 Loss_G: 3.3755 D(x): 0.6539 D(G(z)): 0.3862 / 0.3657\n",
"[99/100][75/391] Loss_D: 2.4944 Loss_G: 2.6549 D(x): 0.7083 D(G(z)): 0.3697 / 0.4447\n",
"[99/100][76/391] Loss_D: 2.2834 Loss_G: 2.8129 D(x): 0.7311 D(G(z)): 0.3415 / 0.4260\n",
"[99/100][77/391] Loss_D: 2.6933 Loss_G: 2.2660 D(x): 0.6786 D(G(z)): 0.2960 / 0.5007\n",
"[99/100][78/391] Loss_D: 2.9481 Loss_G: 2.1411 D(x): 0.6541 D(G(z)): 0.4864 / 0.5395\n",
"[99/100][79/391] Loss_D: 3.0311 Loss_G: 2.9768 D(x): 0.7034 D(G(z)): 0.5433 / 0.4081\n",
"[99/100][80/391] Loss_D: 2.7223 Loss_G: 3.1784 D(x): 0.7969 D(G(z)): 0.4454 / 0.3800\n",
"[99/100][81/391] Loss_D: 3.2288 Loss_G: 2.1180 D(x): 0.6076 D(G(z)): 0.5020 / 0.5451\n",
"[99/100][82/391] Loss_D: 2.8756 Loss_G: 2.4113 D(x): 0.6448 D(G(z)): 0.3872 / 0.4773\n",
"[99/100][83/391] Loss_D: 2.8132 Loss_G: 3.3989 D(x): 0.6964 D(G(z)): 0.4447 / 0.3552\n",
"[99/100][84/391] Loss_D: 2.4180 Loss_G: 3.8601 D(x): 0.6797 D(G(z)): 0.3846 / 0.3275\n",
"[99/100][85/391] Loss_D: 2.5752 Loss_G: 3.0776 D(x): 0.7521 D(G(z)): 0.4583 / 0.3895\n",
"[99/100][86/391] Loss_D: 2.3679 Loss_G: 2.5627 D(x): 0.6905 D(G(z)): 0.2294 / 0.4466\n",
"[99/100][87/391] Loss_D: 2.6138 Loss_G: 4.0801 D(x): 0.7480 D(G(z)): 0.3464 / 0.2939\n",
"[99/100][88/391] Loss_D: 2.9640 Loss_G: 3.4657 D(x): 0.6698 D(G(z)): 0.5003 / 0.3415\n",
"[99/100][89/391] Loss_D: 2.8458 Loss_G: 3.4584 D(x): 0.5970 D(G(z)): 0.3797 / 0.3555\n",
"[99/100][90/391] Loss_D: 2.8117 Loss_G: 3.0655 D(x): 0.6642 D(G(z)): 0.4119 / 0.3974\n",
"[99/100][91/391] Loss_D: 3.6646 Loss_G: 3.1088 D(x): 0.7618 D(G(z)): 0.4350 / 0.3919\n",
"[99/100][92/391] Loss_D: 2.5842 Loss_G: 2.1417 D(x): 0.6961 D(G(z)): 0.4313 / 0.5417\n",
"[99/100][93/391] Loss_D: 2.7571 Loss_G: 2.8141 D(x): 0.6608 D(G(z)): 0.3945 / 0.4408\n",
"[99/100][94/391] Loss_D: 2.4306 Loss_G: 3.6313 D(x): 0.6842 D(G(z)): 0.3842 / 0.3345\n",
"[99/100][95/391] Loss_D: 2.2545 Loss_G: 3.6814 D(x): 0.7504 D(G(z)): 0.2901 / 0.3320\n",
"[99/100][96/391] Loss_D: 2.5897 Loss_G: 3.0249 D(x): 0.6793 D(G(z)): 0.3894 / 0.4037\n",
"[99/100][97/391] Loss_D: 2.8814 Loss_G: 2.9388 D(x): 0.7182 D(G(z)): 0.4315 / 0.3984\n",
"[99/100][98/391] Loss_D: 2.7914 Loss_G: 4.7235 D(x): 0.6483 D(G(z)): 0.3823 / 0.2414\n",
"[99/100][99/391] Loss_D: 2.8440 Loss_G: 3.2704 D(x): 0.6757 D(G(z)): 0.4297 / 0.3584\n",
"[99/100][100/391] Loss_D: 3.2442 Loss_G: 3.5588 D(x): 0.5981 D(G(z)): 0.4648 / 0.3519\n",
"[99/100][101/391] Loss_D: 2.8391 Loss_G: 2.0254 D(x): 0.7412 D(G(z)): 0.4905 / 0.5305\n",
"[99/100][102/391] Loss_D: 2.6582 Loss_G: 3.3662 D(x): 0.6861 D(G(z)): 0.3863 / 0.3626\n",
"[99/100][103/391] Loss_D: 3.4313 Loss_G: 2.9480 D(x): 0.5378 D(G(z)): 0.4454 / 0.4099\n",
"[99/100][104/391] Loss_D: 2.2603 Loss_G: 2.0530 D(x): 0.7516 D(G(z)): 0.3821 / 0.5449\n",
"[99/100][105/391] Loss_D: 3.7396 Loss_G: 2.5866 D(x): 0.5348 D(G(z)): 0.4962 / 0.4584\n",
"[99/100][106/391] Loss_D: 2.2167 Loss_G: 3.1152 D(x): 0.7177 D(G(z)): 0.2894 / 0.4000\n",
"[99/100][107/391] Loss_D: 2.7613 Loss_G: 2.7938 D(x): 0.7217 D(G(z)): 0.4394 / 0.4354\n",
"[99/100][108/391] Loss_D: 2.5162 Loss_G: 3.4274 D(x): 0.7502 D(G(z)): 0.4075 / 0.3561\n",
"[99/100][109/391] Loss_D: 2.4577 Loss_G: 3.0792 D(x): 0.7419 D(G(z)): 0.3992 / 0.4001\n",
"[99/100][110/391] Loss_D: 3.2445 Loss_G: 3.0593 D(x): 0.6316 D(G(z)): 0.5044 / 0.4031\n",
"[99/100][111/391] Loss_D: 3.0476 Loss_G: 3.4524 D(x): 0.5889 D(G(z)): 0.4104 / 0.3537\n",
"[99/100][112/391] Loss_D: 2.8812 Loss_G: 2.0141 D(x): 0.6563 D(G(z)): 0.4539 / 0.5561\n",
"[99/100][113/391] Loss_D: 2.6015 Loss_G: 2.9610 D(x): 0.7529 D(G(z)): 0.4251 / 0.4146\n",
"[99/100][114/391] Loss_D: 2.8646 Loss_G: 3.9297 D(x): 0.6393 D(G(z)): 0.4398 / 0.2975\n",
"[99/100][115/391] Loss_D: 2.4975 Loss_G: 2.0998 D(x): 0.8030 D(G(z)): 0.3980 / 0.5375\n",
"[99/100][116/391] Loss_D: 2.7177 Loss_G: 4.0201 D(x): 0.7174 D(G(z)): 0.4567 / 0.3009\n",
"[99/100][117/391] Loss_D: 2.7317 Loss_G: 2.6937 D(x): 0.6737 D(G(z)): 0.3018 / 0.4539\n",
"[99/100][118/391] Loss_D: 2.2862 Loss_G: 2.8811 D(x): 0.7076 D(G(z)): 0.2920 / 0.4038\n",
"[99/100][119/391] Loss_D: 3.0274 Loss_G: 3.2521 D(x): 0.6793 D(G(z)): 0.4744 / 0.3728\n",
"[99/100][120/391] Loss_D: 2.4778 Loss_G: 3.0791 D(x): 0.7861 D(G(z)): 0.4242 / 0.3835\n",
"[99/100][121/391] Loss_D: 3.6380 Loss_G: 4.1700 D(x): 0.5534 D(G(z)): 0.4410 / 0.3159\n",
"[99/100][122/391] Loss_D: 2.8451 Loss_G: 2.9546 D(x): 0.6911 D(G(z)): 0.4534 / 0.4276\n",
"[99/100][123/391] Loss_D: 2.9940 Loss_G: 3.6249 D(x): 0.6726 D(G(z)): 0.4382 / 0.3334\n",
"[99/100][124/391] Loss_D: 2.9621 Loss_G: 3.3448 D(x): 0.6607 D(G(z)): 0.4627 / 0.3605\n",
"[99/100][125/391] Loss_D: 2.4909 Loss_G: 2.5560 D(x): 0.7032 D(G(z)): 0.3024 / 0.4517\n",
"[99/100][126/391] Loss_D: 2.9679 Loss_G: 4.2661 D(x): 0.6390 D(G(z)): 0.4456 / 0.2720\n",
"[99/100][127/391] Loss_D: 3.0720 Loss_G: 2.8662 D(x): 0.6816 D(G(z)): 0.4434 / 0.4038\n",
"[99/100][128/391] Loss_D: 2.8517 Loss_G: 2.8000 D(x): 0.5971 D(G(z)): 0.3230 / 0.4270\n",
"[99/100][129/391] Loss_D: 2.4578 Loss_G: 3.0946 D(x): 0.6756 D(G(z)): 0.3072 / 0.3832\n",
"[99/100][130/391] Loss_D: 2.5829 Loss_G: 2.2849 D(x): 0.7536 D(G(z)): 0.3989 / 0.5102\n",
"[99/100][131/391] Loss_D: 3.4935 Loss_G: 2.5247 D(x): 0.7503 D(G(z)): 0.6162 / 0.4655\n",
"[99/100][132/391] Loss_D: 2.4724 Loss_G: 2.9953 D(x): 0.7748 D(G(z)): 0.4138 / 0.4098\n",
"[99/100][133/391] Loss_D: 3.0187 Loss_G: 3.4070 D(x): 0.6360 D(G(z)): 0.5007 / 0.3614\n",
"[99/100][134/391] Loss_D: 3.0400 Loss_G: 3.9377 D(x): 0.6449 D(G(z)): 0.4711 / 0.3059\n",
"[99/100][135/391] Loss_D: 2.8051 Loss_G: 3.5555 D(x): 0.6518 D(G(z)): 0.3771 / 0.3565\n",
"[99/100][136/391] Loss_D: 2.9080 Loss_G: 4.0208 D(x): 0.6008 D(G(z)): 0.3721 / 0.3033\n",
"[99/100][137/391] Loss_D: 2.4650 Loss_G: 3.6351 D(x): 0.6820 D(G(z)): 0.2658 / 0.3335\n",
"[99/100][138/391] Loss_D: 3.3289 Loss_G: 2.3929 D(x): 0.5270 D(G(z)): 0.3724 / 0.4761\n",
"[99/100][139/391] Loss_D: 3.0180 Loss_G: 2.7393 D(x): 0.6331 D(G(z)): 0.3918 / 0.4456\n",
"[99/100][140/391] Loss_D: 2.5052 Loss_G: 3.6764 D(x): 0.7258 D(G(z)): 0.4008 / 0.3199\n",
"[99/100][141/391] Loss_D: 2.8814 Loss_G: 3.0718 D(x): 0.7076 D(G(z)): 0.4533 / 0.4099\n",
"[99/100][142/391] Loss_D: 2.8527 Loss_G: 3.0412 D(x): 0.5982 D(G(z)): 0.4011 / 0.4000\n",
"[99/100][143/391] Loss_D: 2.5476 Loss_G: 1.9149 D(x): 0.7464 D(G(z)): 0.3944 / 0.5638\n",
"[99/100][144/391] Loss_D: 2.7731 Loss_G: 3.4746 D(x): 0.6549 D(G(z)): 0.4350 / 0.3417\n",
"[99/100][145/391] Loss_D: 2.4359 Loss_G: 2.8323 D(x): 0.7829 D(G(z)): 0.3800 / 0.4183\n",
"[99/100][146/391] Loss_D: 2.3807 Loss_G: 2.0271 D(x): 0.7739 D(G(z)): 0.3815 / 0.5524\n",
"[99/100][147/391] Loss_D: 2.8008 Loss_G: 2.9734 D(x): 0.7049 D(G(z)): 0.4167 / 0.3924\n",
"[99/100][148/391] Loss_D: 2.1780 Loss_G: 3.6792 D(x): 0.7194 D(G(z)): 0.3202 / 0.3270\n",
"[99/100][149/391] Loss_D: 2.8031 Loss_G: 2.2766 D(x): 0.6570 D(G(z)): 0.3723 / 0.5152\n",
"[99/100][150/391] Loss_D: 2.7756 Loss_G: 3.2756 D(x): 0.7060 D(G(z)): 0.4327 / 0.3901\n",
"[99/100][151/391] Loss_D: 3.6348 Loss_G: 2.4748 D(x): 0.6548 D(G(z)): 0.3530 / 0.4702\n",
"[99/100][152/391] Loss_D: 2.2766 Loss_G: 2.8985 D(x): 0.6849 D(G(z)): 0.3098 / 0.4300\n",
"[99/100][153/391] Loss_D: 2.8284 Loss_G: 2.8758 D(x): 0.8059 D(G(z)): 0.4935 / 0.4305\n",
"[99/100][154/391] Loss_D: 2.6170 Loss_G: 4.1094 D(x): 0.7322 D(G(z)): 0.4654 / 0.2806\n",
"[99/100][155/391] Loss_D: 2.9427 Loss_G: 3.0998 D(x): 0.6036 D(G(z)): 0.3529 / 0.4031\n",
"[99/100][156/391] Loss_D: 3.2055 Loss_G: 3.6708 D(x): 0.6240 D(G(z)): 0.4773 / 0.3312\n",
"[99/100][157/391] Loss_D: 2.9606 Loss_G: 2.5297 D(x): 0.6601 D(G(z)): 0.4227 / 0.4541\n",
"[99/100][158/391] Loss_D: 2.8372 Loss_G: 2.4797 D(x): 0.6874 D(G(z)): 0.4842 / 0.4801\n",
"[99/100][159/391] Loss_D: 3.1079 Loss_G: 2.8631 D(x): 0.5886 D(G(z)): 0.3512 / 0.4192\n",
"[99/100][160/391] Loss_D: 2.7128 Loss_G: 2.2645 D(x): 0.7211 D(G(z)): 0.4528 / 0.5194\n",
"[99/100][161/391] Loss_D: 2.4139 Loss_G: 2.8680 D(x): 0.7963 D(G(z)): 0.3937 / 0.4169\n",
"[99/100][162/391] Loss_D: 2.9408 Loss_G: 3.3323 D(x): 0.6722 D(G(z)): 0.4768 / 0.3643\n",
"[99/100][163/391] Loss_D: 2.7750 Loss_G: 2.5925 D(x): 0.6254 D(G(z)): 0.3848 / 0.4673\n",
"[99/100][164/391] Loss_D: 2.1753 Loss_G: 3.3309 D(x): 0.7051 D(G(z)): 0.3266 / 0.3729\n",
"[99/100][165/391] Loss_D: 3.6291 Loss_G: 2.5289 D(x): 0.7156 D(G(z)): 0.6127 / 0.4658\n",
"[99/100][166/391] Loss_D: 2.6273 Loss_G: 2.5230 D(x): 0.6149 D(G(z)): 0.3453 / 0.4687\n",
"[99/100][167/391] Loss_D: 2.7729 Loss_G: 2.9617 D(x): 0.7303 D(G(z)): 0.4280 / 0.4081\n",
"[99/100][168/391] Loss_D: 2.7211 Loss_G: 4.4141 D(x): 0.6465 D(G(z)): 0.3520 / 0.2720\n",
"[99/100][169/391] Loss_D: 2.7021 Loss_G: 2.5368 D(x): 0.7196 D(G(z)): 0.4685 / 0.4540\n",
"[99/100][170/391] Loss_D: 3.2469 Loss_G: 2.9201 D(x): 0.6385 D(G(z)): 0.5118 / 0.4202\n",
"[99/100][171/391] Loss_D: 3.3144 Loss_G: 3.5562 D(x): 0.6653 D(G(z)): 0.5426 / 0.3546\n",
"[99/100][172/391] Loss_D: 2.6296 Loss_G: 4.0233 D(x): 0.6646 D(G(z)): 0.3710 / 0.2965\n",
"[99/100][173/391] Loss_D: 2.6815 Loss_G: 3.0007 D(x): 0.6421 D(G(z)): 0.3265 / 0.4119\n",
"[99/100][174/391] Loss_D: 2.0175 Loss_G: 4.1524 D(x): 0.7417 D(G(z)): 0.3170 / 0.2943\n",
"[99/100][175/391] Loss_D: 2.4307 Loss_G: 2.8734 D(x): 0.7054 D(G(z)): 0.3289 / 0.4197\n",
"[99/100][176/391] Loss_D: 2.8628 Loss_G: 2.9657 D(x): 0.6674 D(G(z)): 0.4272 / 0.3891\n",
"[99/100][177/391] Loss_D: 2.9004 Loss_G: 2.8125 D(x): 0.6982 D(G(z)): 0.4747 / 0.4390\n",
"[99/100][178/391] Loss_D: 2.3937 Loss_G: 3.1036 D(x): 0.6831 D(G(z)): 0.3068 / 0.3882\n",
"[99/100][179/391] Loss_D: 2.6017 Loss_G: 3.2679 D(x): 0.7084 D(G(z)): 0.4077 / 0.3658\n",
"[99/100][180/391] Loss_D: 2.6318 Loss_G: 1.9600 D(x): 0.7305 D(G(z)): 0.4277 / 0.5525\n",
"[99/100][181/391] Loss_D: 3.5843 Loss_G: 2.4314 D(x): 0.6430 D(G(z)): 0.4349 / 0.4710\n",
"[99/100][182/391] Loss_D: 2.4841 Loss_G: 3.4651 D(x): 0.6752 D(G(z)): 0.3814 / 0.3476\n",
"[99/100][183/391] Loss_D: 2.3648 Loss_G: 2.1523 D(x): 0.7387 D(G(z)): 0.3847 / 0.5145\n",
"[99/100][184/391] Loss_D: 2.9774 Loss_G: 2.4812 D(x): 0.6906 D(G(z)): 0.4851 / 0.4712\n",
"[99/100][185/391] Loss_D: 3.3274 Loss_G: 3.2950 D(x): 0.5252 D(G(z)): 0.3752 / 0.3655\n",
"[99/100][186/391] Loss_D: 2.8614 Loss_G: 2.9749 D(x): 0.6146 D(G(z)): 0.4032 / 0.3774\n",
"[99/100][187/391] Loss_D: 2.3076 Loss_G: 3.2440 D(x): 0.7805 D(G(z)): 0.2541 / 0.3709\n",
"[99/100][188/391] Loss_D: 2.9002 Loss_G: 2.5117 D(x): 0.7116 D(G(z)): 0.5400 / 0.4806\n",
"[99/100][189/391] Loss_D: 2.5811 Loss_G: 2.2417 D(x): 0.6890 D(G(z)): 0.3607 / 0.5062\n",
"[99/100][190/391] Loss_D: 2.6383 Loss_G: 2.2180 D(x): 0.7638 D(G(z)): 0.4779 / 0.5121\n",
"[99/100][191/391] Loss_D: 3.4001 Loss_G: 3.5583 D(x): 0.5543 D(G(z)): 0.4160 / 0.3470\n",
"[99/100][192/391] Loss_D: 2.9389 Loss_G: 3.3976 D(x): 0.5969 D(G(z)): 0.4291 / 0.3629\n",
"[99/100][193/391] Loss_D: 2.7427 Loss_G: 2.8808 D(x): 0.7032 D(G(z)): 0.4677 / 0.4203\n",
"[99/100][194/391] Loss_D: 3.3412 Loss_G: 2.9436 D(x): 0.5459 D(G(z)): 0.3929 / 0.4106\n",
"[99/100][195/391] Loss_D: 2.9014 Loss_G: 2.9715 D(x): 0.7092 D(G(z)): 0.4698 / 0.4013\n",
"[99/100][196/391] Loss_D: 2.2582 Loss_G: 2.4104 D(x): 0.7154 D(G(z)): 0.3476 / 0.4846\n",
"[99/100][197/391] Loss_D: 2.7081 Loss_G: 2.1692 D(x): 0.6823 D(G(z)): 0.3669 / 0.4940\n",
"[99/100][198/391] Loss_D: 2.6589 Loss_G: 3.4050 D(x): 0.7503 D(G(z)): 0.4716 / 0.3447\n",
"[99/100][199/391] Loss_D: 2.8863 Loss_G: 3.4415 D(x): 0.6656 D(G(z)): 0.4886 / 0.3560\n",
"[99/100][200/391] Loss_D: 3.2054 Loss_G: 2.6406 D(x): 0.5553 D(G(z)): 0.3979 / 0.4520\n",
"[99/100][201/391] Loss_D: 2.3833 Loss_G: 2.1241 D(x): 0.7479 D(G(z)): 0.3348 / 0.5462\n",
"[99/100][202/391] Loss_D: 2.2507 Loss_G: 2.8009 D(x): 0.7308 D(G(z)): 0.3501 / 0.4171\n",
"[99/100][203/391] Loss_D: 2.6910 Loss_G: 2.5462 D(x): 0.7247 D(G(z)): 0.4452 / 0.4612\n",
"[99/100][204/391] Loss_D: 2.3531 Loss_G: 2.7128 D(x): 0.7610 D(G(z)): 0.4526 / 0.4255\n",
"[99/100][205/391] Loss_D: 2.8761 Loss_G: 2.4853 D(x): 0.6184 D(G(z)): 0.4143 / 0.4602\n",
"[99/100][206/391] Loss_D: 2.5460 Loss_G: 3.1806 D(x): 0.6384 D(G(z)): 0.3031 / 0.3823\n",
"[99/100][207/391] Loss_D: 2.8940 Loss_G: 3.5054 D(x): 0.6498 D(G(z)): 0.3915 / 0.3368\n",
"[99/100][208/391] Loss_D: 2.6688 Loss_G: 1.7802 D(x): 0.6528 D(G(z)): 0.3875 / 0.5970\n",
"[99/100][209/391] Loss_D: 2.6750 Loss_G: 2.4789 D(x): 0.7086 D(G(z)): 0.4355 / 0.4723\n",
"[99/100][210/391] Loss_D: 2.6345 Loss_G: 1.9275 D(x): 0.8139 D(G(z)): 0.4770 / 0.5462\n",
"[99/100][211/391] Loss_D: 3.7815 Loss_G: 2.7863 D(x): 0.6232 D(G(z)): 0.5196 / 0.4286\n",
"[99/100][212/391] Loss_D: 2.6567 Loss_G: 2.7601 D(x): 0.7386 D(G(z)): 0.4330 / 0.4398\n",
"[99/100][213/391] Loss_D: 3.3951 Loss_G: 2.6369 D(x): 0.6312 D(G(z)): 0.5294 / 0.4432\n",
"[99/100][214/391] Loss_D: 2.2869 Loss_G: 3.1630 D(x): 0.6897 D(G(z)): 0.3170 / 0.3731\n",
"[99/100][215/391] Loss_D: 2.6117 Loss_G: 3.0077 D(x): 0.6800 D(G(z)): 0.3645 / 0.4044\n",
"[99/100][216/391] Loss_D: 2.5044 Loss_G: 2.3094 D(x): 0.7076 D(G(z)): 0.3364 / 0.4930\n",
"[99/100][217/391] Loss_D: 2.9193 Loss_G: 2.0282 D(x): 0.6787 D(G(z)): 0.4701 / 0.5387\n",
"[99/100][218/391] Loss_D: 3.2406 Loss_G: 1.9190 D(x): 0.6134 D(G(z)): 0.5014 / 0.5423\n",
"[99/100][219/391] Loss_D: 2.5864 Loss_G: 2.7459 D(x): 0.6721 D(G(z)): 0.4136 / 0.4412\n",
"[99/100][220/391] Loss_D: 3.0486 Loss_G: 2.5217 D(x): 0.5944 D(G(z)): 0.4242 / 0.4676\n",
"[99/100][221/391] Loss_D: 2.3861 Loss_G: 2.9705 D(x): 0.8415 D(G(z)): 0.4586 / 0.4073\n",
"[99/100][222/391] Loss_D: 2.2327 Loss_G: 3.0976 D(x): 0.7741 D(G(z)): 0.3574 / 0.3988\n",
"[99/100][223/391] Loss_D: 2.6881 Loss_G: 2.6379 D(x): 0.6768 D(G(z)): 0.4119 / 0.4429\n",
"[99/100][224/391] Loss_D: 2.9614 Loss_G: 3.4822 D(x): 0.6571 D(G(z)): 0.4650 / 0.3570\n",
"[99/100][225/391] Loss_D: 2.5856 Loss_G: 3.7642 D(x): 0.6480 D(G(z)): 0.3293 / 0.3106\n",
"[99/100][226/391] Loss_D: 2.8495 Loss_G: 3.0143 D(x): 0.6875 D(G(z)): 0.4329 / 0.3980\n",
"[99/100][227/391] Loss_D: 2.8414 Loss_G: 2.6424 D(x): 0.6192 D(G(z)): 0.3483 / 0.4415\n",
"[99/100][228/391] Loss_D: 2.4474 Loss_G: 2.2117 D(x): 0.7028 D(G(z)): 0.3022 / 0.4967\n",
"[99/100][229/391] Loss_D: 2.7261 Loss_G: 4.0045 D(x): 0.7688 D(G(z)): 0.5176 / 0.3006\n",
"[99/100][230/391] Loss_D: 2.3272 Loss_G: 3.8986 D(x): 0.6924 D(G(z)): 0.3455 / 0.3183\n",
"[99/100][231/391] Loss_D: 2.6956 Loss_G: 2.6814 D(x): 0.7452 D(G(z)): 0.4309 / 0.4328\n",
"[99/100][232/391] Loss_D: 2.8879 Loss_G: 2.7009 D(x): 0.6748 D(G(z)): 0.4177 / 0.4279\n",
"[99/100][233/391] Loss_D: 2.9120 Loss_G: 2.9157 D(x): 0.6132 D(G(z)): 0.3807 / 0.4187\n",
"[99/100][234/391] Loss_D: 2.9726 Loss_G: 3.3971 D(x): 0.6183 D(G(z)): 0.4767 / 0.3696\n",
"[99/100][235/391] Loss_D: 2.8018 Loss_G: 3.1365 D(x): 0.6898 D(G(z)): 0.4575 / 0.3792\n",
"[99/100][236/391] Loss_D: 3.3634 Loss_G: 3.7297 D(x): 0.5579 D(G(z)): 0.4349 / 0.3172\n",
"[99/100][237/391] Loss_D: 2.7314 Loss_G: 2.0778 D(x): 0.7432 D(G(z)): 0.4548 / 0.5234\n",
"[99/100][238/391] Loss_D: 2.7266 Loss_G: 2.9855 D(x): 0.6570 D(G(z)): 0.4375 / 0.4030\n",
"[99/100][239/391] Loss_D: 2.3623 Loss_G: 2.8262 D(x): 0.7609 D(G(z)): 0.4038 / 0.4365\n",
"[99/100][240/391] Loss_D: 2.8147 Loss_G: 2.3750 D(x): 0.6952 D(G(z)): 0.4213 / 0.4863\n",
"[99/100][241/391] Loss_D: 3.7565 Loss_G: 2.6614 D(x): 0.6526 D(G(z)): 0.3810 / 0.4361\n",
"[99/100][242/391] Loss_D: 2.4217 Loss_G: 3.2226 D(x): 0.7067 D(G(z)): 0.3497 / 0.3759\n",
"[99/100][243/391] Loss_D: 2.7839 Loss_G: 2.7629 D(x): 0.6482 D(G(z)): 0.4241 / 0.4318\n",
"[99/100][244/391] Loss_D: 2.2812 Loss_G: 2.4831 D(x): 0.7490 D(G(z)): 0.3764 / 0.4758\n",
"[99/100][245/391] Loss_D: 2.7525 Loss_G: 2.9728 D(x): 0.7157 D(G(z)): 0.4818 / 0.4117\n",
"[99/100][246/391] Loss_D: 3.0162 Loss_G: 2.4628 D(x): 0.6145 D(G(z)): 0.3712 / 0.4569\n",
"[99/100][247/391] Loss_D: 3.0249 Loss_G: 2.7142 D(x): 0.6138 D(G(z)): 0.4055 / 0.4429\n",
"[99/100][248/391] Loss_D: 2.8186 Loss_G: 2.9039 D(x): 0.6658 D(G(z)): 0.4563 / 0.4219\n",
"[99/100][249/391] Loss_D: 2.4787 Loss_G: 1.8538 D(x): 0.7205 D(G(z)): 0.4037 / 0.5740\n",
"[99/100][250/391] Loss_D: 3.0879 Loss_G: 3.1064 D(x): 0.7191 D(G(z)): 0.5165 / 0.3842\n",
"[99/100][251/391] Loss_D: 2.7283 Loss_G: 2.3646 D(x): 0.6941 D(G(z)): 0.4155 / 0.4860\n",
"[99/100][252/391] Loss_D: 2.5735 Loss_G: 3.7607 D(x): 0.6988 D(G(z)): 0.3701 / 0.3180\n",
"[99/100][253/391] Loss_D: 3.0261 Loss_G: 2.5582 D(x): 0.5743 D(G(z)): 0.3555 / 0.4671\n",
"[99/100][254/391] Loss_D: 2.7291 Loss_G: 3.6738 D(x): 0.6985 D(G(z)): 0.4660 / 0.3252\n",
"[99/100][255/391] Loss_D: 2.3774 Loss_G: 3.5342 D(x): 0.7405 D(G(z)): 0.3265 / 0.3457\n",
"[99/100][256/391] Loss_D: 3.0864 Loss_G: 2.6490 D(x): 0.6592 D(G(z)): 0.4914 / 0.4481\n",
"[99/100][257/391] Loss_D: 2.9164 Loss_G: 3.0115 D(x): 0.7082 D(G(z)): 0.3954 / 0.3888\n",
"[99/100][258/391] Loss_D: 2.2759 Loss_G: 2.8645 D(x): 0.7243 D(G(z)): 0.2930 / 0.4144\n",
"[99/100][259/391] Loss_D: 2.5520 Loss_G: 2.6278 D(x): 0.7062 D(G(z)): 0.3750 / 0.4517\n",
"[99/100][260/391] Loss_D: 2.5386 Loss_G: 2.4710 D(x): 0.7167 D(G(z)): 0.3893 / 0.4939\n",
"[99/100][261/391] Loss_D: 3.0767 Loss_G: 4.1451 D(x): 0.6159 D(G(z)): 0.4096 / 0.2814\n",
"[99/100][262/391] Loss_D: 2.9246 Loss_G: 3.6508 D(x): 0.6948 D(G(z)): 0.4375 / 0.3173\n",
"[99/100][263/391] Loss_D: 3.5194 Loss_G: 3.3133 D(x): 0.6366 D(G(z)): 0.5458 / 0.3682\n",
"[99/100][264/391] Loss_D: 2.6655 Loss_G: 2.8885 D(x): 0.6166 D(G(z)): 0.3745 / 0.4252\n",
"[99/100][265/391] Loss_D: 2.7102 Loss_G: 3.2172 D(x): 0.6975 D(G(z)): 0.3900 / 0.3888\n",
"[99/100][266/391] Loss_D: 2.5852 Loss_G: 2.8454 D(x): 0.7335 D(G(z)): 0.3481 / 0.4302\n",
"[99/100][267/391] Loss_D: 3.1023 Loss_G: 3.6468 D(x): 0.7077 D(G(z)): 0.5224 / 0.3344\n",
"[99/100][268/391] Loss_D: 2.3430 Loss_G: 2.6453 D(x): 0.7343 D(G(z)): 0.4380 / 0.4377\n",
"[99/100][269/391] Loss_D: 2.4623 Loss_G: 3.0049 D(x): 0.7283 D(G(z)): 0.3296 / 0.4048\n",
"[99/100][270/391] Loss_D: 3.3899 Loss_G: 3.6049 D(x): 0.5536 D(G(z)): 0.4362 / 0.3453\n",
"[99/100][271/391] Loss_D: 3.7559 Loss_G: 1.9650 D(x): 0.7444 D(G(z)): 0.4232 / 0.5467\n",
"[99/100][272/391] Loss_D: 3.0620 Loss_G: 3.1842 D(x): 0.6415 D(G(z)): 0.4666 / 0.3767\n",
"[99/100][273/391] Loss_D: 2.3216 Loss_G: 3.6991 D(x): 0.7301 D(G(z)): 0.3231 / 0.3197\n",
"[99/100][274/391] Loss_D: 2.4626 Loss_G: 3.0880 D(x): 0.6569 D(G(z)): 0.3264 / 0.3935\n",
"[99/100][275/391] Loss_D: 2.8048 Loss_G: 2.3943 D(x): 0.6659 D(G(z)): 0.3774 / 0.4770\n",
"[99/100][276/391] Loss_D: 2.4394 Loss_G: 3.4573 D(x): 0.7300 D(G(z)): 0.3281 / 0.3600\n",
"[99/100][277/391] Loss_D: 2.8709 Loss_G: 2.4505 D(x): 0.7010 D(G(z)): 0.4583 / 0.4623\n",
"[99/100][278/391] Loss_D: 2.4379 Loss_G: 3.0852 D(x): 0.6716 D(G(z)): 0.3664 / 0.3882\n",
"[99/100][279/391] Loss_D: 3.0406 Loss_G: 2.4892 D(x): 0.7049 D(G(z)): 0.5266 / 0.4830\n",
"[99/100][280/391] Loss_D: 3.3227 Loss_G: 2.6531 D(x): 0.6809 D(G(z)): 0.5609 / 0.4707\n",
"[99/100][281/391] Loss_D: 3.1783 Loss_G: 2.6135 D(x): 0.6850 D(G(z)): 0.5085 / 0.4597\n",
"[99/100][282/391] Loss_D: 2.5055 Loss_G: 4.0170 D(x): 0.6778 D(G(z)): 0.3634 / 0.2974\n",
"[99/100][283/391] Loss_D: 2.6947 Loss_G: 2.8091 D(x): 0.6362 D(G(z)): 0.2716 / 0.4222\n",
"[99/100][284/391] Loss_D: 2.7078 Loss_G: 3.0644 D(x): 0.6845 D(G(z)): 0.4427 / 0.3927\n",
"[99/100][285/391] Loss_D: 2.8417 Loss_G: 3.2620 D(x): 0.6437 D(G(z)): 0.3874 / 0.3755\n",
"[99/100][286/391] Loss_D: 2.8204 Loss_G: 2.4003 D(x): 0.6335 D(G(z)): 0.3848 / 0.4855\n",
"[99/100][287/391] Loss_D: 2.9368 Loss_G: 3.2681 D(x): 0.6992 D(G(z)): 0.4676 / 0.3697\n",
"[99/100][288/391] Loss_D: 2.6707 Loss_G: 2.9379 D(x): 0.6706 D(G(z)): 0.4356 / 0.4021\n",
"[99/100][289/391] Loss_D: 3.1308 Loss_G: 2.2756 D(x): 0.6204 D(G(z)): 0.4035 / 0.4906\n",
"[99/100][290/391] Loss_D: 2.4189 Loss_G: 2.5070 D(x): 0.7608 D(G(z)): 0.3633 / 0.4735\n",
"[99/100][291/391] Loss_D: 2.8336 Loss_G: 3.2218 D(x): 0.7403 D(G(z)): 0.4812 / 0.3736\n",
"[99/100][292/391] Loss_D: 3.2110 Loss_G: 2.1461 D(x): 0.7342 D(G(z)): 0.5406 / 0.5262\n",
"[99/100][293/391] Loss_D: 3.0154 Loss_G: 2.9143 D(x): 0.6220 D(G(z)): 0.4089 / 0.4171\n",
"[99/100][294/391] Loss_D: 2.9344 Loss_G: 3.7376 D(x): 0.5705 D(G(z)): 0.3814 / 0.3293\n",
"[99/100][295/391] Loss_D: 2.7712 Loss_G: 3.0090 D(x): 0.6750 D(G(z)): 0.3971 / 0.3821\n",
"[99/100][296/391] Loss_D: 2.7483 Loss_G: 3.2321 D(x): 0.7290 D(G(z)): 0.4387 / 0.3834\n",
"[99/100][297/391] Loss_D: 2.9737 Loss_G: 3.3534 D(x): 0.6459 D(G(z)): 0.4064 / 0.3669\n",
"[99/100][298/391] Loss_D: 2.2382 Loss_G: 3.1304 D(x): 0.7656 D(G(z)): 0.4384 / 0.3961\n",
"[99/100][299/391] Loss_D: 2.7072 Loss_G: 3.8535 D(x): 0.6268 D(G(z)): 0.3006 / 0.3059\n",
"[99/100][300/391] Loss_D: 3.4512 Loss_G: 2.9965 D(x): 0.6595 D(G(z)): 0.5532 / 0.4071\n",
"[99/100][301/391] Loss_D: 3.7384 Loss_G: 3.1830 D(x): 0.6086 D(G(z)): 0.4485 / 0.3879\n",
"[99/100][302/391] Loss_D: 2.5974 Loss_G: 3.5804 D(x): 0.6450 D(G(z)): 0.3520 / 0.3296\n",
"[99/100][303/391] Loss_D: 2.4444 Loss_G: 2.6723 D(x): 0.7100 D(G(z)): 0.3287 / 0.4524\n",
"[99/100][304/391] Loss_D: 2.3496 Loss_G: 1.8821 D(x): 0.6682 D(G(z)): 0.3587 / 0.5711\n",
"[99/100][305/391] Loss_D: 2.8637 Loss_G: 2.3764 D(x): 0.6253 D(G(z)): 0.3842 / 0.4927\n",
"[99/100][306/391] Loss_D: 2.7595 Loss_G: 2.4936 D(x): 0.7547 D(G(z)): 0.4489 / 0.4670\n",
"[99/100][307/391] Loss_D: 2.4825 Loss_G: 4.1588 D(x): 0.7269 D(G(z)): 0.3379 / 0.2797\n",
"[99/100][308/391] Loss_D: 2.4204 Loss_G: 2.6206 D(x): 0.6609 D(G(z)): 0.3034 / 0.4434\n",
"[99/100][309/391] Loss_D: 2.3600 Loss_G: 3.3221 D(x): 0.7935 D(G(z)): 0.4282 / 0.3631\n",
"[99/100][310/391] Loss_D: 2.4756 Loss_G: 2.9523 D(x): 0.8333 D(G(z)): 0.4132 / 0.4041\n",
"[99/100][311/391] Loss_D: 3.4343 Loss_G: 2.3553 D(x): 0.6356 D(G(z)): 0.4994 / 0.4913\n",
"[99/100][312/391] Loss_D: 2.4332 Loss_G: 2.6524 D(x): 0.6875 D(G(z)): 0.3392 / 0.4488\n",
"[99/100][313/391] Loss_D: 3.1770 Loss_G: 3.7109 D(x): 0.6154 D(G(z)): 0.4598 / 0.3272\n",
"[99/100][314/391] Loss_D: 2.6843 Loss_G: 3.7119 D(x): 0.6012 D(G(z)): 0.3468 / 0.3142\n",
"[99/100][315/391] Loss_D: 2.4918 Loss_G: 4.1718 D(x): 0.7295 D(G(z)): 0.4169 / 0.2874\n",
"[99/100][316/391] Loss_D: 2.7151 Loss_G: 3.1201 D(x): 0.7496 D(G(z)): 0.4522 / 0.3745\n",
"[99/100][317/391] Loss_D: 2.8374 Loss_G: 2.9482 D(x): 0.7107 D(G(z)): 0.3986 / 0.3983\n",
"[99/100][318/391] Loss_D: 2.3470 Loss_G: 3.4103 D(x): 0.6919 D(G(z)): 0.3403 / 0.3583\n",
"[99/100][319/391] Loss_D: 2.7627 Loss_G: 1.9039 D(x): 0.6900 D(G(z)): 0.4496 / 0.5705\n",
"[99/100][320/391] Loss_D: 2.5175 Loss_G: 3.6983 D(x): 0.7546 D(G(z)): 0.3928 / 0.3563\n",
"[99/100][321/391] Loss_D: 2.9632 Loss_G: 3.4507 D(x): 0.6564 D(G(z)): 0.4092 / 0.3627\n",
"[99/100][322/391] Loss_D: 3.0590 Loss_G: 2.2368 D(x): 0.6364 D(G(z)): 0.4730 / 0.5094\n",
"[99/100][323/391] Loss_D: 2.7432 Loss_G: 3.4960 D(x): 0.6575 D(G(z)): 0.3704 / 0.3542\n",
"[99/100][324/391] Loss_D: 1.8523 Loss_G: 3.1894 D(x): 0.7982 D(G(z)): 0.3441 / 0.3868\n",
"[99/100][325/391] Loss_D: 3.0232 Loss_G: 3.1960 D(x): 0.6753 D(G(z)): 0.4215 / 0.3866\n",
"[99/100][326/391] Loss_D: 2.9828 Loss_G: 2.8606 D(x): 0.5626 D(G(z)): 0.3377 / 0.4252\n",
"[99/100][327/391] Loss_D: 3.6363 Loss_G: 3.0330 D(x): 0.6695 D(G(z)): 0.6176 / 0.3988\n",
"[99/100][328/391] Loss_D: 2.5559 Loss_G: 2.8637 D(x): 0.6290 D(G(z)): 0.2497 / 0.4030\n",
"[99/100][329/391] Loss_D: 2.7185 Loss_G: 2.0747 D(x): 0.7746 D(G(z)): 0.4694 / 0.5363\n",
"[99/100][330/391] Loss_D: 2.6889 Loss_G: 3.1079 D(x): 0.6738 D(G(z)): 0.3810 / 0.4024\n",
"[99/100][331/391] Loss_D: 3.4991 Loss_G: 4.3474 D(x): 0.6365 D(G(z)): 0.3617 / 0.2750\n",
"[99/100][332/391] Loss_D: 2.6085 Loss_G: 2.7777 D(x): 0.6531 D(G(z)): 0.3728 / 0.4423\n",
"[99/100][333/391] Loss_D: 2.6250 Loss_G: 3.0055 D(x): 0.7617 D(G(z)): 0.3722 / 0.3926\n",
"[99/100][334/391] Loss_D: 2.4628 Loss_G: 3.7846 D(x): 0.7417 D(G(z)): 0.4523 / 0.3288\n",
"[99/100][335/391] Loss_D: 2.7557 Loss_G: 2.3924 D(x): 0.7219 D(G(z)): 0.4531 / 0.4851\n",
"[99/100][336/391] Loss_D: 2.6010 Loss_G: 3.7978 D(x): 0.7195 D(G(z)): 0.3573 / 0.3098\n",
"[99/100][337/391] Loss_D: 2.9181 Loss_G: 3.0458 D(x): 0.6662 D(G(z)): 0.4413 / 0.3937\n",
"[99/100][338/391] Loss_D: 2.9224 Loss_G: 2.7885 D(x): 0.6502 D(G(z)): 0.4397 / 0.4277\n",
"[99/100][339/391] Loss_D: 2.8672 Loss_G: 2.7715 D(x): 0.6022 D(G(z)): 0.3385 / 0.4385\n",
"[99/100][340/391] Loss_D: 2.7795 Loss_G: 2.9354 D(x): 0.7211 D(G(z)): 0.4477 / 0.4086\n",
"[99/100][341/391] Loss_D: 2.1079 Loss_G: 3.3790 D(x): 0.7643 D(G(z)): 0.2691 / 0.3583\n",
"[99/100][342/391] Loss_D: 2.8585 Loss_G: 3.2849 D(x): 0.6793 D(G(z)): 0.4635 / 0.3679\n",
"[99/100][343/391] Loss_D: 3.1951 Loss_G: 3.6545 D(x): 0.6693 D(G(z)): 0.5060 / 0.3365\n",
"[99/100][344/391] Loss_D: 2.3822 Loss_G: 3.4158 D(x): 0.7200 D(G(z)): 0.3972 / 0.3455\n",
"[99/100][345/391] Loss_D: 2.5586 Loss_G: 2.7595 D(x): 0.6726 D(G(z)): 0.3357 / 0.4333\n",
"[99/100][346/391] Loss_D: 2.9635 Loss_G: 3.0551 D(x): 0.5771 D(G(z)): 0.3154 / 0.3966\n",
"[99/100][347/391] Loss_D: 2.9336 Loss_G: 2.4887 D(x): 0.6966 D(G(z)): 0.4175 / 0.4603\n",
"[99/100][348/391] Loss_D: 2.7630 Loss_G: 2.6160 D(x): 0.7052 D(G(z)): 0.4738 / 0.4383\n",
"[99/100][349/391] Loss_D: 2.6285 Loss_G: 3.0439 D(x): 0.7122 D(G(z)): 0.4089 / 0.4107\n",
"[99/100][350/391] Loss_D: 2.6018 Loss_G: 2.9487 D(x): 0.7026 D(G(z)): 0.4111 / 0.4209\n",
"[99/100][351/391] Loss_D: 3.3080 Loss_G: 2.9597 D(x): 0.5982 D(G(z)): 0.4584 / 0.4175\n",
"[99/100][352/391] Loss_D: 2.2643 Loss_G: 1.9144 D(x): 0.7491 D(G(z)): 0.3770 / 0.5806\n",
"[99/100][353/391] Loss_D: 2.8582 Loss_G: 3.7210 D(x): 0.6944 D(G(z)): 0.4647 / 0.3228\n",
"[99/100][354/391] Loss_D: 2.0584 Loss_G: 2.3597 D(x): 0.7440 D(G(z)): 0.3314 / 0.4729\n",
"[99/100][355/391] Loss_D: 2.5662 Loss_G: 2.6286 D(x): 0.6836 D(G(z)): 0.3975 / 0.4424\n",
"[99/100][356/391] Loss_D: 2.4625 Loss_G: 2.6663 D(x): 0.6939 D(G(z)): 0.3388 / 0.4538\n",
"[99/100][357/391] Loss_D: 2.9587 Loss_G: 4.3355 D(x): 0.6448 D(G(z)): 0.4391 / 0.2728\n",
"[99/100][358/391] Loss_D: 3.0573 Loss_G: 3.0325 D(x): 0.5949 D(G(z)): 0.4453 / 0.3970\n",
"[99/100][359/391] Loss_D: 3.7465 Loss_G: 3.0025 D(x): 0.5656 D(G(z)): 0.5254 / 0.4099\n",
"[99/100][360/391] Loss_D: 2.7518 Loss_G: 2.3511 D(x): 0.6543 D(G(z)): 0.3844 / 0.4808\n",
"[99/100][361/391] Loss_D: 3.5271 Loss_G: 2.5913 D(x): 0.6549 D(G(z)): 0.4135 / 0.4700\n",
"[99/100][362/391] Loss_D: 3.0122 Loss_G: 3.0874 D(x): 0.8099 D(G(z)): 0.5531 / 0.3861\n",
"[99/100][363/391] Loss_D: 3.0091 Loss_G: 2.5681 D(x): 0.7473 D(G(z)): 0.5335 / 0.4554\n",
"[99/100][364/391] Loss_D: 3.0686 Loss_G: 3.7919 D(x): 0.6288 D(G(z)): 0.4119 / 0.3199\n",
"[99/100][365/391] Loss_D: 3.4515 Loss_G: 4.1294 D(x): 0.5021 D(G(z)): 0.3545 / 0.2762\n",
"[99/100][366/391] Loss_D: 2.6789 Loss_G: 3.0308 D(x): 0.5889 D(G(z)): 0.2597 / 0.3886\n",
"[99/100][367/391] Loss_D: 2.6488 Loss_G: 3.1611 D(x): 0.7592 D(G(z)): 0.4179 / 0.3588\n",
"[99/100][368/391] Loss_D: 2.6654 Loss_G: 2.7927 D(x): 0.6490 D(G(z)): 0.3612 / 0.4182\n",
"[99/100][369/391] Loss_D: 2.5342 Loss_G: 2.7070 D(x): 0.6610 D(G(z)): 0.3340 / 0.4614\n",
"[99/100][370/391] Loss_D: 2.7663 Loss_G: 2.7165 D(x): 0.6419 D(G(z)): 0.4083 / 0.4248\n",
"[99/100][371/391] Loss_D: 2.9600 Loss_G: 3.0412 D(x): 0.7770 D(G(z)): 0.5343 / 0.4060\n",
"[99/100][372/391] Loss_D: 3.2901 Loss_G: 3.2643 D(x): 0.7334 D(G(z)): 0.5881 / 0.3685\n",
"[99/100][373/391] Loss_D: 2.3432 Loss_G: 3.1466 D(x): 0.7676 D(G(z)): 0.3655 / 0.3920\n",
"[99/100][374/391] Loss_D: 2.9041 Loss_G: 2.4185 D(x): 0.7376 D(G(z)): 0.4958 / 0.4927\n",
"[99/100][375/391] Loss_D: 2.6284 Loss_G: 3.0835 D(x): 0.7100 D(G(z)): 0.3756 / 0.3972\n",
"[99/100][376/391] Loss_D: 3.0054 Loss_G: 2.8496 D(x): 0.7083 D(G(z)): 0.4527 / 0.4185\n",
"[99/100][377/391] Loss_D: 2.4163 Loss_G: 3.2544 D(x): 0.7681 D(G(z)): 0.3583 / 0.3825\n",
"[99/100][378/391] Loss_D: 2.4713 Loss_G: 4.1176 D(x): 0.6444 D(G(z)): 0.1721 / 0.2795\n",
"[99/100][379/391] Loss_D: 2.5134 Loss_G: 3.3261 D(x): 0.6790 D(G(z)): 0.3826 / 0.3829\n",
"[99/100][380/391] Loss_D: 2.6084 Loss_G: 2.7601 D(x): 0.7783 D(G(z)): 0.4720 / 0.4262\n",
"[99/100][381/391] Loss_D: 2.2360 Loss_G: 3.1308 D(x): 0.7488 D(G(z)): 0.3104 / 0.3960\n",
"[99/100][382/391] Loss_D: 2.3904 Loss_G: 2.7729 D(x): 0.7098 D(G(z)): 0.3432 / 0.4318\n",
"[99/100][383/391] Loss_D: 2.4745 Loss_G: 2.4823 D(x): 0.7338 D(G(z)): 0.3611 / 0.4619\n",
"[99/100][384/391] Loss_D: 2.8849 Loss_G: 3.4696 D(x): 0.6611 D(G(z)): 0.4915 / 0.3542\n",
"[99/100][385/391] Loss_D: 2.7846 Loss_G: 2.4718 D(x): 0.6635 D(G(z)): 0.4144 / 0.4679\n",
"[99/100][386/391] Loss_D: 2.6549 Loss_G: 2.7205 D(x): 0.6765 D(G(z)): 0.3147 / 0.4409\n",
"[99/100][387/391] Loss_D: 2.4909 Loss_G: 3.1441 D(x): 0.7026 D(G(z)): 0.2992 / 0.3690\n",
"[99/100][388/391] Loss_D: 2.7734 Loss_G: 3.4487 D(x): 0.6279 D(G(z)): 0.3856 / 0.3501\n",
"[99/100][389/391] Loss_D: 2.7265 Loss_G: 3.0979 D(x): 0.6836 D(G(z)): 0.3973 / 0.3927\n",
"[99/100][390/391] Loss_D: 2.8547 Loss_G: 2.6697 D(x): 0.7344 D(G(z)): 0.5310 / 0.4545\n",
"[99/100][391/391] Loss_D: 3.7621 Loss_G: 2.8274 D(x): 0.6773 D(G(z)): 0.2414 / 0.4351\n",
"[100/100][1/391] Loss_D: 3.5876 Loss_G: 1.8612 D(x): 0.6153 D(G(z)): 0.4827 / 0.5600\n",
"[100/100][2/391] Loss_D: 3.2056 Loss_G: 3.1172 D(x): 0.7335 D(G(z)): 0.5696 / 0.3896\n",
"[100/100][3/391] Loss_D: 3.2330 Loss_G: 3.7114 D(x): 0.5978 D(G(z)): 0.4797 / 0.3246\n",
"[100/100][4/391] Loss_D: 2.2735 Loss_G: 3.5580 D(x): 0.7661 D(G(z)): 0.4240 / 0.3535\n",
"[100/100][5/391] Loss_D: 2.2682 Loss_G: 2.2827 D(x): 0.7162 D(G(z)): 0.2920 / 0.4883\n",
"[100/100][6/391] Loss_D: 2.8459 Loss_G: 3.3934 D(x): 0.6234 D(G(z)): 0.3531 / 0.3531\n",
"[100/100][7/391] Loss_D: 2.8947 Loss_G: 3.8225 D(x): 0.6711 D(G(z)): 0.3852 / 0.3170\n",
"[100/100][8/391] Loss_D: 2.0873 Loss_G: 3.2857 D(x): 0.7852 D(G(z)): 0.4125 / 0.3701\n",
"[100/100][9/391] Loss_D: 2.6645 Loss_G: 2.3471 D(x): 0.6854 D(G(z)): 0.4387 / 0.4970\n",
"[100/100][10/391] Loss_D: 2.6828 Loss_G: 3.0604 D(x): 0.6987 D(G(z)): 0.4172 / 0.3972\n",
"[100/100][11/391] Loss_D: 3.6764 Loss_G: 3.4596 D(x): 0.5187 D(G(z)): 0.4866 / 0.3525\n",
"[100/100][12/391] Loss_D: 2.6293 Loss_G: 2.8578 D(x): 0.6857 D(G(z)): 0.3841 / 0.4252\n",
"[100/100][13/391] Loss_D: 2.7501 Loss_G: 3.6327 D(x): 0.6252 D(G(z)): 0.3356 / 0.3404\n",
"[100/100][14/391] Loss_D: 2.3899 Loss_G: 2.6670 D(x): 0.7932 D(G(z)): 0.4252 / 0.4478\n",
"[100/100][15/391] Loss_D: 2.3451 Loss_G: 3.1524 D(x): 0.7256 D(G(z)): 0.3489 / 0.3792\n",
"[100/100][16/391] Loss_D: 3.5670 Loss_G: 3.0274 D(x): 0.5474 D(G(z)): 0.4219 / 0.4070\n",
"[100/100][17/391] Loss_D: 2.4808 Loss_G: 2.8960 D(x): 0.7280 D(G(z)): 0.3714 / 0.3914\n",
"[100/100][18/391] Loss_D: 2.4754 Loss_G: 2.7719 D(x): 0.7034 D(G(z)): 0.3865 / 0.4225\n",
"[100/100][19/391] Loss_D: 2.9882 Loss_G: 3.1154 D(x): 0.6916 D(G(z)): 0.4930 / 0.3956\n",
"[100/100][20/391] Loss_D: 2.4942 Loss_G: 2.2033 D(x): 0.7376 D(G(z)): 0.4247 / 0.5251\n",
"[100/100][21/391] Loss_D: 2.8321 Loss_G: 2.9254 D(x): 0.7460 D(G(z)): 0.5336 / 0.4266\n",
"[100/100][22/391] Loss_D: 2.2712 Loss_G: 2.7885 D(x): 0.7585 D(G(z)): 0.3293 / 0.4261\n",
"[100/100][23/391] Loss_D: 2.6541 Loss_G: 4.0132 D(x): 0.6476 D(G(z)): 0.3342 / 0.2898\n",
"[100/100][24/391] Loss_D: 2.8355 Loss_G: 3.8658 D(x): 0.6264 D(G(z)): 0.3838 / 0.3097\n",
"[100/100][25/391] Loss_D: 2.6001 Loss_G: 2.9988 D(x): 0.6979 D(G(z)): 0.3318 / 0.3905\n",
"[100/100][26/391] Loss_D: 2.5162 Loss_G: 3.1736 D(x): 0.6318 D(G(z)): 0.2904 / 0.3781\n",
"[100/100][27/391] Loss_D: 3.1608 Loss_G: 2.5690 D(x): 0.6980 D(G(z)): 0.4883 / 0.4615\n",
"[100/100][28/391] Loss_D: 2.5665 Loss_G: 3.3504 D(x): 0.6696 D(G(z)): 0.4128 / 0.3579\n",
"[100/100][29/391] Loss_D: 2.5716 Loss_G: 2.4444 D(x): 0.6811 D(G(z)): 0.3579 / 0.4712\n",
"[100/100][30/391] Loss_D: 2.6489 Loss_G: 3.0669 D(x): 0.6875 D(G(z)): 0.4161 / 0.4133\n",
"[100/100][31/391] Loss_D: 3.6760 Loss_G: 2.8265 D(x): 0.6881 D(G(z)): 0.4496 / 0.4254\n",
"[100/100][32/391] Loss_D: 2.5755 Loss_G: 2.1574 D(x): 0.7004 D(G(z)): 0.3856 / 0.5230\n",
"[100/100][33/391] Loss_D: 3.0027 Loss_G: 2.7976 D(x): 0.6956 D(G(z)): 0.4704 / 0.4380\n",
"[100/100][34/391] Loss_D: 2.3341 Loss_G: 3.4336 D(x): 0.8454 D(G(z)): 0.4627 / 0.3403\n",
"[100/100][35/391] Loss_D: 2.9542 Loss_G: 3.1050 D(x): 0.6377 D(G(z)): 0.4591 / 0.3972\n",
"[100/100][36/391] Loss_D: 2.6831 Loss_G: 3.8445 D(x): 0.8146 D(G(z)): 0.4668 / 0.3081\n",
"[100/100][37/391] Loss_D: 2.4732 Loss_G: 3.0603 D(x): 0.7060 D(G(z)): 0.3299 / 0.3823\n",
"[100/100][38/391] Loss_D: 2.2764 Loss_G: 2.8449 D(x): 0.7215 D(G(z)): 0.3382 / 0.4142\n",
"[100/100][39/391] Loss_D: 2.7916 Loss_G: 3.0781 D(x): 0.6463 D(G(z)): 0.3171 / 0.4098\n",
"[100/100][40/391] Loss_D: 3.1288 Loss_G: 3.3068 D(x): 0.6325 D(G(z)): 0.4127 / 0.3601\n",
"[100/100][41/391] Loss_D: 3.2388 Loss_G: 3.7187 D(x): 0.5831 D(G(z)): 0.4162 / 0.3263\n",
"[100/100][42/391] Loss_D: 3.3358 Loss_G: 3.7075 D(x): 0.6393 D(G(z)): 0.5224 / 0.3242\n",
"[100/100][43/391] Loss_D: 3.1725 Loss_G: 2.8311 D(x): 0.6981 D(G(z)): 0.5175 / 0.4328\n",
"[100/100][44/391] Loss_D: 2.4848 Loss_G: 2.7609 D(x): 0.6573 D(G(z)): 0.3702 / 0.4316\n",
"[100/100][45/391] Loss_D: 2.9054 Loss_G: 3.1479 D(x): 0.5969 D(G(z)): 0.3714 / 0.3797\n",
"[100/100][46/391] Loss_D: 2.8644 Loss_G: 3.0426 D(x): 0.6333 D(G(z)): 0.4096 / 0.3771\n",
"[100/100][47/391] Loss_D: 3.1570 Loss_G: 2.7966 D(x): 0.6410 D(G(z)): 0.4887 / 0.4033\n",
"[100/100][48/391] Loss_D: 3.3367 Loss_G: 3.0637 D(x): 0.6188 D(G(z)): 0.4949 / 0.3792\n",
"[100/100][49/391] Loss_D: 3.1006 Loss_G: 3.3043 D(x): 0.7056 D(G(z)): 0.5521 / 0.3806\n",
"[100/100][50/391] Loss_D: 2.6059 Loss_G: 3.1627 D(x): 0.7374 D(G(z)): 0.4471 / 0.3909\n",
"[100/100][51/391] Loss_D: 3.1001 Loss_G: 3.9971 D(x): 0.6268 D(G(z)): 0.4521 / 0.2921\n",
"[100/100][52/391] Loss_D: 3.0354 Loss_G: 3.0622 D(x): 0.6108 D(G(z)): 0.3856 / 0.4064\n",
"[100/100][53/391] Loss_D: 2.7492 Loss_G: 2.7815 D(x): 0.6502 D(G(z)): 0.4095 / 0.4375\n",
"[100/100][54/391] Loss_D: 2.3057 Loss_G: 2.5508 D(x): 0.7516 D(G(z)): 0.3964 / 0.4586\n",
"[100/100][55/391] Loss_D: 3.1689 Loss_G: 2.8429 D(x): 0.7392 D(G(z)): 0.5913 / 0.4328\n",
"[100/100][56/391] Loss_D: 3.1098 Loss_G: 3.4198 D(x): 0.5976 D(G(z)): 0.3915 / 0.3385\n",
"[100/100][57/391] Loss_D: 3.6004 Loss_G: 2.3949 D(x): 0.5657 D(G(z)): 0.4979 / 0.4570\n",
"[100/100][58/391] Loss_D: 2.7903 Loss_G: 3.8903 D(x): 0.6342 D(G(z)): 0.4086 / 0.3008\n",
"[100/100][59/391] Loss_D: 3.0268 Loss_G: 2.4764 D(x): 0.5548 D(G(z)): 0.2777 / 0.4886\n",
"[100/100][60/391] Loss_D: 2.8216 Loss_G: 3.6197 D(x): 0.6101 D(G(z)): 0.3438 / 0.3301\n",
"[100/100][61/391] Loss_D: 3.6201 Loss_G: 1.6301 D(x): 0.6176 D(G(z)): 0.4546 / 0.6033\n",
"[100/100][62/391] Loss_D: 3.0907 Loss_G: 3.3076 D(x): 0.7809 D(G(z)): 0.5394 / 0.3761\n",
"[100/100][63/391] Loss_D: 3.6134 Loss_G: 1.9376 D(x): 0.5332 D(G(z)): 0.4887 / 0.5562\n",
"[100/100][64/391] Loss_D: 3.0182 Loss_G: 3.1352 D(x): 0.6716 D(G(z)): 0.5239 / 0.3855\n",
"[100/100][65/391] Loss_D: 3.1522 Loss_G: 3.2602 D(x): 0.5939 D(G(z)): 0.4004 / 0.3658\n",
"[100/100][66/391] Loss_D: 2.4745 Loss_G: 2.3621 D(x): 0.7473 D(G(z)): 0.3874 / 0.4720\n",
"[100/100][67/391] Loss_D: 3.2063 Loss_G: 3.4284 D(x): 0.6203 D(G(z)): 0.4131 / 0.3445\n",
"[100/100][68/391] Loss_D: 2.3680 Loss_G: 2.9599 D(x): 0.7021 D(G(z)): 0.3572 / 0.4016\n",
"[100/100][69/391] Loss_D: 2.7366 Loss_G: 1.9400 D(x): 0.6983 D(G(z)): 0.4135 / 0.5683\n",
"[100/100][70/391] Loss_D: 2.9718 Loss_G: 3.2602 D(x): 0.6581 D(G(z)): 0.4485 / 0.3700\n",
"[100/100][71/391] Loss_D: 3.2414 Loss_G: 2.9582 D(x): 0.7336 D(G(z)): 0.5391 / 0.4237\n",
"[100/100][72/391] Loss_D: 2.3036 Loss_G: 2.3423 D(x): 0.7128 D(G(z)): 0.3335 / 0.4837\n",
"[100/100][73/391] Loss_D: 2.4623 Loss_G: 2.6827 D(x): 0.7588 D(G(z)): 0.4200 / 0.4439\n",
"[100/100][74/391] Loss_D: 2.8326 Loss_G: 2.6285 D(x): 0.6037 D(G(z)): 0.3446 / 0.4404\n",
"[100/100][75/391] Loss_D: 2.5185 Loss_G: 3.1144 D(x): 0.7466 D(G(z)): 0.4168 / 0.3770\n",
"[100/100][76/391] Loss_D: 2.7894 Loss_G: 2.9726 D(x): 0.6991 D(G(z)): 0.4548 / 0.4102\n",
"[100/100][77/391] Loss_D: 2.8646 Loss_G: 3.9332 D(x): 0.6156 D(G(z)): 0.3352 / 0.2982\n",
"[100/100][78/391] Loss_D: 2.6712 Loss_G: 3.7679 D(x): 0.6490 D(G(z)): 0.3874 / 0.3271\n",
"[100/100][79/391] Loss_D: 2.6546 Loss_G: 2.6028 D(x): 0.7038 D(G(z)): 0.4277 / 0.4635\n",
"[100/100][80/391] Loss_D: 3.0457 Loss_G: 1.9223 D(x): 0.6381 D(G(z)): 0.4553 / 0.5634\n",
"[100/100][81/391] Loss_D: 2.8718 Loss_G: 4.2125 D(x): 0.6516 D(G(z)): 0.4590 / 0.2800\n",
"[100/100][82/391] Loss_D: 2.5195 Loss_G: 2.8348 D(x): 0.6786 D(G(z)): 0.3063 / 0.4248\n",
"[100/100][83/391] Loss_D: 3.3436 Loss_G: 2.7259 D(x): 0.5883 D(G(z)): 0.4311 / 0.4369\n",
"[100/100][84/391] Loss_D: 2.5270 Loss_G: 3.1133 D(x): 0.7102 D(G(z)): 0.4393 / 0.3929\n",
"[100/100][85/391] Loss_D: 3.0016 Loss_G: 2.0599 D(x): 0.6810 D(G(z)): 0.4874 / 0.5295\n",
"[100/100][86/391] Loss_D: 2.5089 Loss_G: 3.9862 D(x): 0.7938 D(G(z)): 0.4145 / 0.2909\n",
"[100/100][87/391] Loss_D: 3.0070 Loss_G: 3.3603 D(x): 0.6771 D(G(z)): 0.3850 / 0.3522\n",
"[100/100][88/391] Loss_D: 2.0705 Loss_G: 2.9485 D(x): 0.7794 D(G(z)): 0.3338 / 0.4176\n",
"[100/100][89/391] Loss_D: 2.8064 Loss_G: 2.6665 D(x): 0.6546 D(G(z)): 0.4239 / 0.4432\n",
"[100/100][90/391] Loss_D: 2.9193 Loss_G: 2.8605 D(x): 0.6572 D(G(z)): 0.4358 / 0.4219\n",
"[100/100][91/391] Loss_D: 3.7121 Loss_G: 2.3862 D(x): 0.6459 D(G(z)): 0.5408 / 0.4992\n",
"[100/100][92/391] Loss_D: 2.8098 Loss_G: 3.3821 D(x): 0.6195 D(G(z)): 0.4165 / 0.3570\n",
"[100/100][93/391] Loss_D: 2.7046 Loss_G: 3.6837 D(x): 0.7048 D(G(z)): 0.3967 / 0.3236\n",
"[100/100][94/391] Loss_D: 3.2994 Loss_G: 2.7466 D(x): 0.6139 D(G(z)): 0.5028 / 0.4398\n",
"[100/100][95/391] Loss_D: 2.6720 Loss_G: 2.1688 D(x): 0.6368 D(G(z)): 0.3163 / 0.5206\n",
"[100/100][96/391] Loss_D: 2.4437 Loss_G: 2.9001 D(x): 0.7672 D(G(z)): 0.3992 / 0.4237\n",
"[100/100][97/391] Loss_D: 2.6619 Loss_G: 3.6074 D(x): 0.7400 D(G(z)): 0.3695 / 0.3339\n",
"[100/100][98/391] Loss_D: 2.9354 Loss_G: 4.0113 D(x): 0.6725 D(G(z)): 0.4946 / 0.3108\n",
"[100/100][99/391] Loss_D: 2.5468 Loss_G: 3.4472 D(x): 0.7545 D(G(z)): 0.4237 / 0.3590\n",
"[100/100][100/391] Loss_D: 3.0742 Loss_G: 2.9517 D(x): 0.5878 D(G(z)): 0.4136 / 0.4262\n",
"[100/100][101/391] Loss_D: 2.2532 Loss_G: 3.6718 D(x): 0.7484 D(G(z)): 0.3594 / 0.3385\n",
"[100/100][102/391] Loss_D: 2.7139 Loss_G: 2.4938 D(x): 0.6177 D(G(z)): 0.3025 / 0.4627\n",
"[100/100][103/391] Loss_D: 2.7006 Loss_G: 2.5440 D(x): 0.7607 D(G(z)): 0.4873 / 0.4775\n",
"[100/100][104/391] Loss_D: 3.1045 Loss_G: 3.6405 D(x): 0.6962 D(G(z)): 0.5249 / 0.3307\n",
"[100/100][105/391] Loss_D: 2.5897 Loss_G: 3.2787 D(x): 0.7391 D(G(z)): 0.3913 / 0.3671\n",
"[100/100][106/391] Loss_D: 2.5782 Loss_G: 4.0100 D(x): 0.6922 D(G(z)): 0.3739 / 0.3016\n",
"[100/100][107/391] Loss_D: 3.1456 Loss_G: 2.8780 D(x): 0.5927 D(G(z)): 0.4131 / 0.4220\n",
"[100/100][108/391] Loss_D: 3.0135 Loss_G: 4.3564 D(x): 0.6087 D(G(z)): 0.3346 / 0.2790\n",
"[100/100][109/391] Loss_D: 2.7333 Loss_G: 3.1678 D(x): 0.6377 D(G(z)): 0.3827 / 0.3990\n",
"[100/100][110/391] Loss_D: 2.8032 Loss_G: 2.0443 D(x): 0.6292 D(G(z)): 0.3603 / 0.5630\n",
"[100/100][111/391] Loss_D: 2.5058 Loss_G: 2.4536 D(x): 0.7800 D(G(z)): 0.4221 / 0.4775\n",
"[100/100][112/391] Loss_D: 2.8336 Loss_G: 2.5678 D(x): 0.6851 D(G(z)): 0.4499 / 0.4595\n",
"[100/100][113/391] Loss_D: 2.8700 Loss_G: 2.5196 D(x): 0.8041 D(G(z)): 0.5238 / 0.4678\n",
"[100/100][114/391] Loss_D: 2.1018 Loss_G: 2.5092 D(x): 0.7590 D(G(z)): 0.3364 / 0.4738\n",
"[100/100][115/391] Loss_D: 3.1259 Loss_G: 3.6509 D(x): 0.5773 D(G(z)): 0.3536 / 0.3338\n",
"[100/100][116/391] Loss_D: 2.9579 Loss_G: 3.5926 D(x): 0.5531 D(G(z)): 0.3003 / 0.3251\n",
"[100/100][117/391] Loss_D: 2.5716 Loss_G: 2.5277 D(x): 0.8370 D(G(z)): 0.4136 / 0.4623\n",
"[100/100][118/391] Loss_D: 2.7878 Loss_G: 3.4025 D(x): 0.6916 D(G(z)): 0.4972 / 0.3536\n",
"[100/100][119/391] Loss_D: 3.1760 Loss_G: 2.9957 D(x): 0.7001 D(G(z)): 0.5468 / 0.4069\n",
"[100/100][120/391] Loss_D: 2.5901 Loss_G: 3.1843 D(x): 0.7176 D(G(z)): 0.4274 / 0.3935\n",
"[100/100][121/391] Loss_D: 3.5296 Loss_G: 3.2682 D(x): 0.6447 D(G(z)): 0.4382 / 0.3892\n",
"[100/100][122/391] Loss_D: 2.9652 Loss_G: 4.6760 D(x): 0.5762 D(G(z)): 0.3068 / 0.2438\n",
"[100/100][123/391] Loss_D: 2.8077 Loss_G: 2.5312 D(x): 0.6334 D(G(z)): 0.3362 / 0.4474\n",
"[100/100][124/391] Loss_D: 2.6690 Loss_G: 2.7336 D(x): 0.7088 D(G(z)): 0.4591 / 0.4446\n",
"[100/100][125/391] Loss_D: 2.6864 Loss_G: 3.3830 D(x): 0.6837 D(G(z)): 0.3437 / 0.3637\n",
"[100/100][126/391] Loss_D: 2.5245 Loss_G: 2.8995 D(x): 0.6736 D(G(z)): 0.3471 / 0.4083\n",
"[100/100][127/391] Loss_D: 3.1822 Loss_G: 2.3208 D(x): 0.7128 D(G(z)): 0.5055 / 0.4994\n",
"[100/100][128/391] Loss_D: 2.1409 Loss_G: 3.2205 D(x): 0.8068 D(G(z)): 0.4365 / 0.3713\n",
"[100/100][129/391] Loss_D: 2.7926 Loss_G: 2.9943 D(x): 0.6848 D(G(z)): 0.4383 / 0.4188\n",
"[100/100][130/391] Loss_D: 2.6063 Loss_G: 2.5009 D(x): 0.7267 D(G(z)): 0.3989 / 0.4694\n",
"[100/100][131/391] Loss_D: 2.5276 Loss_G: 2.6630 D(x): 0.7305 D(G(z)): 0.4304 / 0.4434\n",
"[100/100][132/391] Loss_D: 2.9451 Loss_G: 3.9342 D(x): 0.6074 D(G(z)): 0.3703 / 0.2952\n",
"[100/100][133/391] Loss_D: 2.4545 Loss_G: 3.7852 D(x): 0.6596 D(G(z)): 0.3259 / 0.3226\n",
"[100/100][134/391] Loss_D: 2.7752 Loss_G: 2.3340 D(x): 0.6082 D(G(z)): 0.3595 / 0.4826\n",
"[100/100][135/391] Loss_D: 2.4126 Loss_G: 3.9794 D(x): 0.7614 D(G(z)): 0.3916 / 0.2962\n",
"[100/100][136/391] Loss_D: 2.6190 Loss_G: 4.5259 D(x): 0.7516 D(G(z)): 0.4154 / 0.2513\n",
"[100/100][137/391] Loss_D: 2.6229 Loss_G: 3.3505 D(x): 0.7284 D(G(z)): 0.4353 / 0.3656\n",
"[100/100][138/391] Loss_D: 3.4271 Loss_G: 2.8140 D(x): 0.5510 D(G(z)): 0.4464 / 0.4347\n",
"[100/100][139/391] Loss_D: 2.2867 Loss_G: 2.7510 D(x): 0.7356 D(G(z)): 0.2525 / 0.4332\n",
"[100/100][140/391] Loss_D: 2.8088 Loss_G: 2.0318 D(x): 0.6789 D(G(z)): 0.4497 / 0.5360\n",
"[100/100][141/391] Loss_D: 2.5508 Loss_G: 2.7531 D(x): 0.6860 D(G(z)): 0.3526 / 0.4337\n",
"[100/100][142/391] Loss_D: 2.5392 Loss_G: 3.4911 D(x): 0.6852 D(G(z)): 0.3756 / 0.3460\n",
"[100/100][143/391] Loss_D: 2.8359 Loss_G: 2.9404 D(x): 0.6464 D(G(z)): 0.3901 / 0.4098\n",
"[100/100][144/391] Loss_D: 2.9180 Loss_G: 2.9552 D(x): 0.7465 D(G(z)): 0.5230 / 0.4071\n",
"[100/100][145/391] Loss_D: 3.2434 Loss_G: 3.3861 D(x): 0.6419 D(G(z)): 0.4769 / 0.3633\n",
"[100/100][146/391] Loss_D: 3.2380 Loss_G: 3.7290 D(x): 0.6485 D(G(z)): 0.5112 / 0.3074\n",
"[100/100][147/391] Loss_D: 3.0237 Loss_G: 2.9623 D(x): 0.7030 D(G(z)): 0.4865 / 0.4141\n",
"[100/100][148/391] Loss_D: 2.6465 Loss_G: 2.0779 D(x): 0.6163 D(G(z)): 0.3210 / 0.5505\n",
"[100/100][149/391] Loss_D: 3.0277 Loss_G: 3.1338 D(x): 0.7213 D(G(z)): 0.5188 / 0.3970\n",
"[100/100][150/391] Loss_D: 3.1133 Loss_G: 3.8875 D(x): 0.6039 D(G(z)): 0.4003 / 0.3160\n",
"[100/100][151/391] Loss_D: 3.6076 Loss_G: 3.5522 D(x): 0.6893 D(G(z)): 0.3367 / 0.3342\n",
"[100/100][152/391] Loss_D: 2.6461 Loss_G: 3.0206 D(x): 0.6563 D(G(z)): 0.4142 / 0.3972\n",
"[100/100][153/391] Loss_D: 2.7282 Loss_G: 3.0359 D(x): 0.6415 D(G(z)): 0.3305 / 0.3899\n",
"[100/100][154/391] Loss_D: 2.7529 Loss_G: 2.7045 D(x): 0.7646 D(G(z)): 0.5434 / 0.4435\n",
"[100/100][155/391] Loss_D: 3.9932 Loss_G: 2.9581 D(x): 0.5226 D(G(z)): 0.5174 / 0.3932\n",
"[100/100][156/391] Loss_D: 3.0217 Loss_G: 3.0263 D(x): 0.5715 D(G(z)): 0.3512 / 0.4007\n",
"[100/100][157/391] Loss_D: 2.6135 Loss_G: 3.0805 D(x): 0.7543 D(G(z)): 0.3876 / 0.3991\n",
"[100/100][158/391] Loss_D: 2.3908 Loss_G: 1.6611 D(x): 0.7085 D(G(z)): 0.3418 / 0.6122\n",
"[100/100][159/391] Loss_D: 2.8014 Loss_G: 3.2475 D(x): 0.7227 D(G(z)): 0.4498 / 0.3770\n",
"[100/100][160/391] Loss_D: 2.9532 Loss_G: 2.0726 D(x): 0.6899 D(G(z)): 0.4675 / 0.5469\n",
"[100/100][161/391] Loss_D: 3.1601 Loss_G: 3.2921 D(x): 0.6229 D(G(z)): 0.4383 / 0.3826\n",
"[100/100][162/391] Loss_D: 2.4434 Loss_G: 3.3033 D(x): 0.7330 D(G(z)): 0.3835 / 0.3673\n",
"[100/100][163/391] Loss_D: 2.5135 Loss_G: 3.6282 D(x): 0.7562 D(G(z)): 0.4419 / 0.3404\n",
"[100/100][164/391] Loss_D: 2.2834 Loss_G: 3.6437 D(x): 0.7520 D(G(z)): 0.4029 / 0.3363\n",
"[100/100][165/391] Loss_D: 2.8195 Loss_G: 3.8875 D(x): 0.6736 D(G(z)): 0.4502 / 0.3259\n",
"[100/100][166/391] Loss_D: 2.5471 Loss_G: 3.8816 D(x): 0.7082 D(G(z)): 0.3711 / 0.3054\n",
"[100/100][167/391] Loss_D: 2.4153 Loss_G: 3.8079 D(x): 0.7269 D(G(z)): 0.3091 / 0.3030\n",
"[100/100][168/391] Loss_D: 2.6692 Loss_G: 2.9050 D(x): 0.6852 D(G(z)): 0.3708 / 0.4191\n",
"[100/100][169/391] Loss_D: 2.8022 Loss_G: 2.3682 D(x): 0.6406 D(G(z)): 0.4153 / 0.4952\n",
"[100/100][170/391] Loss_D: 2.6308 Loss_G: 3.5027 D(x): 0.6245 D(G(z)): 0.3238 / 0.3628\n",
"[100/100][171/391] Loss_D: 2.7482 Loss_G: 3.0913 D(x): 0.7157 D(G(z)): 0.4160 / 0.4094\n",
"[100/100][172/391] Loss_D: 2.8715 Loss_G: 4.3877 D(x): 0.6834 D(G(z)): 0.4641 / 0.2759\n",
"[100/100][173/391] Loss_D: 2.9721 Loss_G: 2.2274 D(x): 0.6440 D(G(z)): 0.3907 / 0.5064\n",
"[100/100][174/391] Loss_D: 2.8968 Loss_G: 3.0480 D(x): 0.7431 D(G(z)): 0.5256 / 0.4019\n",
"[100/100][175/391] Loss_D: 3.4296 Loss_G: 2.9388 D(x): 0.5664 D(G(z)): 0.4550 / 0.4041\n",
"[100/100][176/391] Loss_D: 3.4808 Loss_G: 3.2740 D(x): 0.6056 D(G(z)): 0.4980 / 0.3636\n",
"[100/100][177/391] Loss_D: 2.5961 Loss_G: 3.3584 D(x): 0.6847 D(G(z)): 0.3772 / 0.3452\n",
"[100/100][178/391] Loss_D: 2.1002 Loss_G: 2.3855 D(x): 0.7413 D(G(z)): 0.3312 / 0.4755\n",
"[100/100][179/391] Loss_D: 2.2871 Loss_G: 3.3529 D(x): 0.7327 D(G(z)): 0.2859 / 0.3601\n",
"[100/100][180/391] Loss_D: 2.4795 Loss_G: 3.1728 D(x): 0.6871 D(G(z)): 0.3369 / 0.3929\n",
"[100/100][181/391] Loss_D: 3.5278 Loss_G: 2.5834 D(x): 0.6574 D(G(z)): 0.4282 / 0.4419\n",
"[100/100][182/391] Loss_D: 2.2022 Loss_G: 2.9926 D(x): 0.6925 D(G(z)): 0.2653 / 0.3902\n",
"[100/100][183/391] Loss_D: 2.8323 Loss_G: 2.2244 D(x): 0.7110 D(G(z)): 0.5001 / 0.5123\n",
"[100/100][184/391] Loss_D: 2.3662 Loss_G: 2.9176 D(x): 0.7362 D(G(z)): 0.3613 / 0.4093\n",
"[100/100][185/391] Loss_D: 3.1057 Loss_G: 3.1349 D(x): 0.6515 D(G(z)): 0.4689 / 0.3928\n",
"[100/100][186/391] Loss_D: 2.7739 Loss_G: 2.9639 D(x): 0.7232 D(G(z)): 0.4343 / 0.4008\n",
"[100/100][187/391] Loss_D: 3.0494 Loss_G: 3.0553 D(x): 0.7271 D(G(z)): 0.5370 / 0.3899\n",
"[100/100][188/391] Loss_D: 2.5450 Loss_G: 3.5480 D(x): 0.6907 D(G(z)): 0.4185 / 0.3597\n",
"[100/100][189/391] Loss_D: 2.4174 Loss_G: 2.7111 D(x): 0.6881 D(G(z)): 0.3309 / 0.4452\n",
"[100/100][190/391] Loss_D: 2.6497 Loss_G: 2.8674 D(x): 0.6926 D(G(z)): 0.4192 / 0.4273\n",
"[100/100][191/391] Loss_D: 2.8293 Loss_G: 3.8116 D(x): 0.7106 D(G(z)): 0.4179 / 0.3104\n",
"[100/100][192/391] Loss_D: 2.3349 Loss_G: 2.8185 D(x): 0.6862 D(G(z)): 0.3554 / 0.4368\n",
"[100/100][193/391] Loss_D: 2.6797 Loss_G: 2.9433 D(x): 0.6699 D(G(z)): 0.4127 / 0.4206\n",
"[100/100][194/391] Loss_D: 3.2641 Loss_G: 1.9238 D(x): 0.6580 D(G(z)): 0.5166 / 0.5564\n",
"[100/100][195/391] Loss_D: 2.6437 Loss_G: 3.4890 D(x): 0.7025 D(G(z)): 0.3463 / 0.3408\n",
"[100/100][196/391] Loss_D: 2.7108 Loss_G: 2.8416 D(x): 0.7304 D(G(z)): 0.4840 / 0.4218\n",
"[100/100][197/391] Loss_D: 2.5007 Loss_G: 3.4431 D(x): 0.7336 D(G(z)): 0.2712 / 0.3519\n",
"[100/100][198/391] Loss_D: 2.6240 Loss_G: 2.8879 D(x): 0.6380 D(G(z)): 0.2394 / 0.4165\n",
"[100/100][199/391] Loss_D: 2.6398 Loss_G: 2.5962 D(x): 0.6985 D(G(z)): 0.4084 / 0.4540\n",
"[100/100][200/391] Loss_D: 2.6438 Loss_G: 2.0326 D(x): 0.7524 D(G(z)): 0.4597 / 0.5297\n",
"[100/100][201/391] Loss_D: 3.5841 Loss_G: 3.1301 D(x): 0.5689 D(G(z)): 0.4734 / 0.4021\n",
"[100/100][202/391] Loss_D: 3.0301 Loss_G: 2.8184 D(x): 0.6372 D(G(z)): 0.4877 / 0.4207\n",
"[100/100][203/391] Loss_D: 2.7971 Loss_G: 2.8360 D(x): 0.6553 D(G(z)): 0.3702 / 0.4305\n",
"[100/100][204/391] Loss_D: 2.3679 Loss_G: 3.3267 D(x): 0.7187 D(G(z)): 0.4304 / 0.3707\n",
"[100/100][205/391] Loss_D: 2.7801 Loss_G: 2.3337 D(x): 0.6437 D(G(z)): 0.4100 / 0.4831\n",
"[100/100][206/391] Loss_D: 2.9567 Loss_G: 3.7991 D(x): 0.5844 D(G(z)): 0.4009 / 0.3202\n",
"[100/100][207/391] Loss_D: 2.8047 Loss_G: 2.5842 D(x): 0.7047 D(G(z)): 0.4544 / 0.4685\n",
"[100/100][208/391] Loss_D: 2.8156 Loss_G: 2.6660 D(x): 0.6845 D(G(z)): 0.4714 / 0.4361\n",
"[100/100][209/391] Loss_D: 2.6693 Loss_G: 2.3263 D(x): 0.6651 D(G(z)): 0.3742 / 0.4996\n",
"[100/100][210/391] Loss_D: 2.6504 Loss_G: 2.2026 D(x): 0.6759 D(G(z)): 0.3744 / 0.5153\n",
"[100/100][211/391] Loss_D: 3.6414 Loss_G: 2.8657 D(x): 0.7173 D(G(z)): 0.4107 / 0.4221\n",
"[100/100][212/391] Loss_D: 3.2538 Loss_G: 2.9030 D(x): 0.6721 D(G(z)): 0.5196 / 0.4148\n",
"[100/100][213/391] Loss_D: 2.6112 Loss_G: 2.7832 D(x): 0.6736 D(G(z)): 0.3453 / 0.4351\n",
"[100/100][214/391] Loss_D: 2.4522 Loss_G: 3.2645 D(x): 0.7417 D(G(z)): 0.4265 / 0.3728\n",
"[100/100][215/391] Loss_D: 2.9611 Loss_G: 2.2753 D(x): 0.6904 D(G(z)): 0.4795 / 0.4967\n",
"[100/100][216/391] Loss_D: 2.6038 Loss_G: 3.2997 D(x): 0.6936 D(G(z)): 0.3704 / 0.3689\n",
"[100/100][217/391] Loss_D: 2.4960 Loss_G: 2.5945 D(x): 0.6663 D(G(z)): 0.3104 / 0.4491\n",
"[100/100][218/391] Loss_D: 3.0220 Loss_G: 3.0278 D(x): 0.5624 D(G(z)): 0.3411 / 0.4080\n",
"[100/100][219/391] Loss_D: 2.3455 Loss_G: 2.9194 D(x): 0.7480 D(G(z)): 0.3936 / 0.4120\n",
"[100/100][220/391] Loss_D: 3.1216 Loss_G: 2.9552 D(x): 0.7600 D(G(z)): 0.5464 / 0.4116\n",
"[100/100][221/391] Loss_D: 2.7208 Loss_G: 3.8267 D(x): 0.7089 D(G(z)): 0.4281 / 0.3299\n",
"[100/100][222/391] Loss_D: 2.2029 Loss_G: 2.3072 D(x): 0.7172 D(G(z)): 0.3172 / 0.4944\n",
"[100/100][223/391] Loss_D: 2.4211 Loss_G: 3.7693 D(x): 0.7371 D(G(z)): 0.3744 / 0.3237\n",
"[100/100][224/391] Loss_D: 2.7071 Loss_G: 2.4981 D(x): 0.6375 D(G(z)): 0.3769 / 0.4729\n",
"[100/100][225/391] Loss_D: 2.4712 Loss_G: 3.0262 D(x): 0.6977 D(G(z)): 0.3468 / 0.3882\n",
"[100/100][226/391] Loss_D: 2.5633 Loss_G: 3.9100 D(x): 0.7178 D(G(z)): 0.4306 / 0.3023\n",
"[100/100][227/391] Loss_D: 2.3651 Loss_G: 3.5904 D(x): 0.7090 D(G(z)): 0.2564 / 0.3496\n",
"[100/100][228/391] Loss_D: 2.5538 Loss_G: 2.1809 D(x): 0.6973 D(G(z)): 0.3810 / 0.5174\n",
"[100/100][229/391] Loss_D: 2.2520 Loss_G: 2.8189 D(x): 0.8134 D(G(z)): 0.4218 / 0.4306\n",
"[100/100][230/391] Loss_D: 2.0441 Loss_G: 3.1226 D(x): 0.7558 D(G(z)): 0.3209 / 0.3907\n",
"[100/100][231/391] Loss_D: 3.0389 Loss_G: 3.1575 D(x): 0.6094 D(G(z)): 0.4135 / 0.3969\n",
"[100/100][232/391] Loss_D: 3.2463 Loss_G: 3.2971 D(x): 0.6699 D(G(z)): 0.5394 / 0.3668\n",
"[100/100][233/391] Loss_D: 2.3844 Loss_G: 2.4776 D(x): 0.7525 D(G(z)): 0.3772 / 0.4633\n",
"[100/100][234/391] Loss_D: 2.0508 Loss_G: 3.4441 D(x): 0.6984 D(G(z)): 0.2772 / 0.3490\n",
"[100/100][235/391] Loss_D: 2.6142 Loss_G: 2.6363 D(x): 0.7272 D(G(z)): 0.4415 / 0.4514\n",
"[100/100][236/391] Loss_D: 2.7456 Loss_G: 3.8342 D(x): 0.7183 D(G(z)): 0.4568 / 0.3262\n",
"[100/100][237/391] Loss_D: 3.1665 Loss_G: 2.4883 D(x): 0.5941 D(G(z)): 0.4160 / 0.4534\n",
"[100/100][238/391] Loss_D: 2.2797 Loss_G: 2.9488 D(x): 0.7114 D(G(z)): 0.2904 / 0.4078\n",
"[100/100][239/391] Loss_D: 2.6983 Loss_G: 3.6641 D(x): 0.6897 D(G(z)): 0.4169 / 0.3395\n",
"[100/100][240/391] Loss_D: 3.0974 Loss_G: 2.3960 D(x): 0.6851 D(G(z)): 0.4988 / 0.4857\n",
"[100/100][241/391] Loss_D: 3.7110 Loss_G: 3.3261 D(x): 0.6819 D(G(z)): 0.3808 / 0.3797\n",
"[100/100][242/391] Loss_D: 2.5966 Loss_G: 4.0927 D(x): 0.8429 D(G(z)): 0.4803 / 0.2848\n",
"[100/100][243/391] Loss_D: 3.4529 Loss_G: 2.8482 D(x): 0.5685 D(G(z)): 0.4669 / 0.4232\n",
"[100/100][244/391] Loss_D: 2.3139 Loss_G: 2.3151 D(x): 0.7179 D(G(z)): 0.3843 / 0.5114\n",
"[100/100][245/391] Loss_D: 2.5296 Loss_G: 3.0631 D(x): 0.6404 D(G(z)): 0.3012 / 0.4072\n",
"[100/100][246/391] Loss_D: 2.3920 Loss_G: 2.7680 D(x): 0.7360 D(G(z)): 0.3539 / 0.4249\n",
"[100/100][247/391] Loss_D: 2.9765 Loss_G: 2.9276 D(x): 0.6358 D(G(z)): 0.4652 / 0.3969\n",
"[100/100][248/391] Loss_D: 2.7964 Loss_G: 3.7157 D(x): 0.6357 D(G(z)): 0.3930 / 0.3242\n",
"[100/100][249/391] Loss_D: 2.7417 Loss_G: 2.6430 D(x): 0.6948 D(G(z)): 0.4682 / 0.4560\n",
"[100/100][250/391] Loss_D: 2.4483 Loss_G: 3.0225 D(x): 0.7638 D(G(z)): 0.3908 / 0.4104\n",
"[100/100][251/391] Loss_D: 3.2844 Loss_G: 2.6130 D(x): 0.6263 D(G(z)): 0.4781 / 0.4387\n",
"[100/100][252/391] Loss_D: 2.8746 Loss_G: 3.5302 D(x): 0.7447 D(G(z)): 0.4826 / 0.3549\n",
"[100/100][253/391] Loss_D: 2.6170 Loss_G: 2.8727 D(x): 0.6922 D(G(z)): 0.3427 / 0.4451\n",
"[100/100][254/391] Loss_D: 2.8210 Loss_G: 4.0676 D(x): 0.6010 D(G(z)): 0.3536 / 0.2951\n",
"[100/100][255/391] Loss_D: 3.0767 Loss_G: 3.0904 D(x): 0.5677 D(G(z)): 0.2915 / 0.3847\n",
"[100/100][256/391] Loss_D: 2.7919 Loss_G: 2.6075 D(x): 0.7379 D(G(z)): 0.4665 / 0.4452\n",
"[100/100][257/391] Loss_D: 2.7727 Loss_G: 2.6627 D(x): 0.7403 D(G(z)): 0.3941 / 0.4646\n",
"[100/100][258/391] Loss_D: 2.7238 Loss_G: 2.6045 D(x): 0.7111 D(G(z)): 0.5008 / 0.4535\n",
"[100/100][259/391] Loss_D: 2.8224 Loss_G: 3.2815 D(x): 0.7228 D(G(z)): 0.4763 / 0.3675\n",
"[100/100][260/391] Loss_D: 2.8323 Loss_G: 2.7163 D(x): 0.6993 D(G(z)): 0.4697 / 0.4411\n",
"[100/100][261/391] Loss_D: 2.7144 Loss_G: 2.8006 D(x): 0.6428 D(G(z)): 0.3068 / 0.4191\n",
"[100/100][262/391] Loss_D: 2.7071 Loss_G: 3.1058 D(x): 0.7341 D(G(z)): 0.4222 / 0.3944\n",
"[100/100][263/391] Loss_D: 2.7524 Loss_G: 3.6811 D(x): 0.6663 D(G(z)): 0.3310 / 0.3291\n",
"[100/100][264/391] Loss_D: 2.1649 Loss_G: 3.4979 D(x): 0.7160 D(G(z)): 0.3552 / 0.3453\n",
"[100/100][265/391] Loss_D: 3.0272 Loss_G: 2.9860 D(x): 0.7184 D(G(z)): 0.5173 / 0.4194\n",
"[100/100][266/391] Loss_D: 3.1388 Loss_G: 4.4349 D(x): 0.6026 D(G(z)): 0.4155 / 0.2598\n",
"[100/100][267/391] Loss_D: 2.7302 Loss_G: 3.1930 D(x): 0.6555 D(G(z)): 0.4177 / 0.3885\n",
"[100/100][268/391] Loss_D: 2.7822 Loss_G: 3.5401 D(x): 0.5954 D(G(z)): 0.3443 / 0.3384\n",
"[100/100][269/391] Loss_D: 3.1918 Loss_G: 2.2426 D(x): 0.6486 D(G(z)): 0.4471 / 0.4942\n",
"[100/100][270/391] Loss_D: 3.3269 Loss_G: 2.2077 D(x): 0.6877 D(G(z)): 0.5544 / 0.5024\n",
"[100/100][271/391] Loss_D: 3.7314 Loss_G: 2.4246 D(x): 0.5406 D(G(z)): 0.4354 / 0.4918\n",
"[100/100][272/391] Loss_D: 2.0709 Loss_G: 2.2608 D(x): 0.7893 D(G(z)): 0.2902 / 0.5049\n",
"[100/100][273/391] Loss_D: 2.3940 Loss_G: 3.9284 D(x): 0.7844 D(G(z)): 0.3403 / 0.2934\n",
"[100/100][274/391] Loss_D: 3.1095 Loss_G: 2.4617 D(x): 0.6766 D(G(z)): 0.5104 / 0.4716\n",
"[100/100][275/391] Loss_D: 2.8031 Loss_G: 3.4157 D(x): 0.6854 D(G(z)): 0.4428 / 0.3609\n",
"[100/100][276/391] Loss_D: 2.8509 Loss_G: 3.1627 D(x): 0.6734 D(G(z)): 0.4225 / 0.3877\n",
"[100/100][277/391] Loss_D: 2.5828 Loss_G: 3.5799 D(x): 0.7440 D(G(z)): 0.3381 / 0.3560\n",
"[100/100][278/391] Loss_D: 2.6129 Loss_G: 2.8465 D(x): 0.7214 D(G(z)): 0.4639 / 0.4205\n",
"[100/100][279/391] Loss_D: 2.7589 Loss_G: 3.1904 D(x): 0.6948 D(G(z)): 0.4519 / 0.3874\n",
"[100/100][280/391] Loss_D: 2.8684 Loss_G: 3.8957 D(x): 0.6376 D(G(z)): 0.4052 / 0.3029\n",
"[100/100][281/391] Loss_D: 2.9801 Loss_G: 2.6733 D(x): 0.6889 D(G(z)): 0.4716 / 0.4532\n",
"[100/100][282/391] Loss_D: 2.4573 Loss_G: 3.1858 D(x): 0.7303 D(G(z)): 0.3877 / 0.3854\n",
"[100/100][283/391] Loss_D: 2.7935 Loss_G: 2.8794 D(x): 0.6193 D(G(z)): 0.3234 / 0.4250\n",
"[100/100][284/391] Loss_D: 2.0504 Loss_G: 2.9460 D(x): 0.7745 D(G(z)): 0.3782 / 0.4171\n",
"[100/100][285/391] Loss_D: 2.7562 Loss_G: 3.2811 D(x): 0.7087 D(G(z)): 0.4613 / 0.3703\n",
"[100/100][286/391] Loss_D: 2.7710 Loss_G: 3.2262 D(x): 0.6848 D(G(z)): 0.4194 / 0.3703\n",
"[100/100][287/391] Loss_D: 2.5341 Loss_G: 3.5553 D(x): 0.7239 D(G(z)): 0.3109 / 0.3480\n",
"[100/100][288/391] Loss_D: 2.7634 Loss_G: 4.2888 D(x): 0.6234 D(G(z)): 0.3821 / 0.2870\n",
"[100/100][289/391] Loss_D: 2.7842 Loss_G: 2.4319 D(x): 0.6594 D(G(z)): 0.3387 / 0.4772\n",
"[100/100][290/391] Loss_D: 3.0593 Loss_G: 3.0489 D(x): 0.6741 D(G(z)): 0.4868 / 0.3968\n",
"[100/100][291/391] Loss_D: 2.9598 Loss_G: 2.3027 D(x): 0.6667 D(G(z)): 0.4378 / 0.5070\n",
"[100/100][292/391] Loss_D: 2.3349 Loss_G: 1.9605 D(x): 0.7816 D(G(z)): 0.3442 / 0.5607\n",
"[100/100][293/391] Loss_D: 2.5734 Loss_G: 3.0118 D(x): 0.6798 D(G(z)): 0.3612 / 0.4049\n",
"[100/100][294/391] Loss_D: 3.1820 Loss_G: 3.3203 D(x): 0.6011 D(G(z)): 0.4807 / 0.3718\n",
"[100/100][295/391] Loss_D: 2.5378 Loss_G: 3.0424 D(x): 0.7757 D(G(z)): 0.4141 / 0.3954\n",
"[100/100][296/391] Loss_D: 2.5337 Loss_G: 3.0866 D(x): 0.7292 D(G(z)): 0.3751 / 0.3783\n",
"[100/100][297/391] Loss_D: 3.2493 Loss_G: 2.5716 D(x): 0.6061 D(G(z)): 0.4212 / 0.4749\n",
"[100/100][298/391] Loss_D: 3.0229 Loss_G: 2.6776 D(x): 0.6829 D(G(z)): 0.4999 / 0.4383\n",
"[100/100][299/391] Loss_D: 2.7401 Loss_G: 4.0089 D(x): 0.6805 D(G(z)): 0.4102 / 0.3041\n",
"[100/100][300/391] Loss_D: 2.9779 Loss_G: 3.2804 D(x): 0.6346 D(G(z)): 0.4239 / 0.3879\n",
"[100/100][301/391] Loss_D: 3.6039 Loss_G: 2.5379 D(x): 0.6116 D(G(z)): 0.3355 / 0.4894\n",
"[100/100][302/391] Loss_D: 2.6698 Loss_G: 3.4052 D(x): 0.6715 D(G(z)): 0.4062 / 0.3543\n",
"[100/100][303/391] Loss_D: 2.7731 Loss_G: 3.2622 D(x): 0.7365 D(G(z)): 0.4998 / 0.3822\n",
"[100/100][304/391] Loss_D: 2.7373 Loss_G: 3.6826 D(x): 0.6263 D(G(z)): 0.4051 / 0.3286\n",
"[100/100][305/391] Loss_D: 2.5272 Loss_G: 3.4537 D(x): 0.7245 D(G(z)): 0.3846 / 0.3568\n",
"[100/100][306/391] Loss_D: 2.6651 Loss_G: 3.0489 D(x): 0.7222 D(G(z)): 0.4170 / 0.4162\n",
"[100/100][307/391] Loss_D: 2.9544 Loss_G: 3.6952 D(x): 0.5968 D(G(z)): 0.3990 / 0.3205\n",
"[100/100][308/391] Loss_D: 2.5957 Loss_G: 2.6996 D(x): 0.6319 D(G(z)): 0.3020 / 0.4374\n",
"[100/100][309/391] Loss_D: 2.8936 Loss_G: 3.7811 D(x): 0.7371 D(G(z)): 0.5426 / 0.3330\n",
"[100/100][310/391] Loss_D: 2.4863 Loss_G: 2.5007 D(x): 0.7683 D(G(z)): 0.3863 / 0.4700\n",
"[100/100][311/391] Loss_D: 3.0916 Loss_G: 1.8754 D(x): 0.5283 D(G(z)): 0.2503 / 0.5722\n",
"[100/100][312/391] Loss_D: 2.5639 Loss_G: 2.0232 D(x): 0.6901 D(G(z)): 0.3776 / 0.5418\n",
"[100/100][313/391] Loss_D: 2.6387 Loss_G: 3.0752 D(x): 0.7097 D(G(z)): 0.4392 / 0.3999\n",
"[100/100][314/391] Loss_D: 2.8086 Loss_G: 3.3875 D(x): 0.7159 D(G(z)): 0.5165 / 0.3732\n",
"[100/100][315/391] Loss_D: 2.6493 Loss_G: 2.4412 D(x): 0.7101 D(G(z)): 0.4245 / 0.4859\n",
"[100/100][316/391] Loss_D: 2.8535 Loss_G: 3.2968 D(x): 0.6890 D(G(z)): 0.4389 / 0.3660\n",
"[100/100][317/391] Loss_D: 2.8523 Loss_G: 3.4819 D(x): 0.7283 D(G(z)): 0.4196 / 0.3441\n",
"[100/100][318/391] Loss_D: 2.5549 Loss_G: 4.1848 D(x): 0.6789 D(G(z)): 0.4129 / 0.2801\n",
"[100/100][319/391] Loss_D: 2.5410 Loss_G: 2.9968 D(x): 0.6733 D(G(z)): 0.3626 / 0.4121\n",
"[100/100][320/391] Loss_D: 3.1255 Loss_G: 3.8959 D(x): 0.7237 D(G(z)): 0.5238 / 0.3118\n",
"[100/100][321/391] Loss_D: 3.6141 Loss_G: 3.0085 D(x): 0.6014 D(G(z)): 0.5285 / 0.3987\n",
"[100/100][322/391] Loss_D: 3.1975 Loss_G: 4.3477 D(x): 0.5873 D(G(z)): 0.4201 / 0.2702\n",
"[100/100][323/391] Loss_D: 2.7804 Loss_G: 2.7780 D(x): 0.6768 D(G(z)): 0.3776 / 0.4178\n",
"[100/100][324/391] Loss_D: 2.2593 Loss_G: 3.1501 D(x): 0.7555 D(G(z)): 0.4064 / 0.3858\n",
"[100/100][325/391] Loss_D: 2.9619 Loss_G: 2.3227 D(x): 0.6117 D(G(z)): 0.3428 / 0.4804\n",
"[100/100][326/391] Loss_D: 3.2786 Loss_G: 2.9723 D(x): 0.6046 D(G(z)): 0.4783 / 0.4064\n",
"[100/100][327/391] Loss_D: 2.9982 Loss_G: 3.1071 D(x): 0.5890 D(G(z)): 0.4127 / 0.3839\n",
"[100/100][328/391] Loss_D: 2.7954 Loss_G: 1.6865 D(x): 0.6416 D(G(z)): 0.4214 / 0.6038\n",
"[100/100][329/391] Loss_D: 3.2223 Loss_G: 2.9577 D(x): 0.6821 D(G(z)): 0.5284 / 0.4116\n",
"[100/100][330/391] Loss_D: 2.4040 Loss_G: 2.3344 D(x): 0.7127 D(G(z)): 0.3279 / 0.4918\n",
"[100/100][331/391] Loss_D: 3.5371 Loss_G: 2.7613 D(x): 0.7468 D(G(z)): 0.4073 / 0.4252\n",
"[100/100][332/391] Loss_D: 2.6051 Loss_G: 2.7175 D(x): 0.6880 D(G(z)): 0.4022 / 0.4420\n",
"[100/100][333/391] Loss_D: 2.7465 Loss_G: 2.3363 D(x): 0.7499 D(G(z)): 0.4028 / 0.4892\n",
"[100/100][334/391] Loss_D: 2.1713 Loss_G: 3.1985 D(x): 0.7236 D(G(z)): 0.3562 / 0.3869\n",
"[100/100][335/391] Loss_D: 2.7197 Loss_G: 2.9534 D(x): 0.6029 D(G(z)): 0.2980 / 0.3996\n",
"[100/100][336/391] Loss_D: 2.4743 Loss_G: 2.1807 D(x): 0.7140 D(G(z)): 0.3323 / 0.5249\n",
"[100/100][337/391] Loss_D: 2.7129 Loss_G: 3.2961 D(x): 0.6959 D(G(z)): 0.4182 / 0.3581\n",
"[100/100][338/391] Loss_D: 2.9303 Loss_G: 2.5696 D(x): 0.6963 D(G(z)): 0.4964 / 0.4563\n",
"[100/100][339/391] Loss_D: 2.6115 Loss_G: 2.9080 D(x): 0.7952 D(G(z)): 0.4970 / 0.4383\n",
"[100/100][340/391] Loss_D: 2.9233 Loss_G: 3.8636 D(x): 0.6867 D(G(z)): 0.4727 / 0.3170\n",
"[100/100][341/391] Loss_D: 2.2707 Loss_G: 4.9629 D(x): 0.7799 D(G(z)): 0.3632 / 0.2237\n",
"[100/100][342/391] Loss_D: 2.7922 Loss_G: 3.4430 D(x): 0.7169 D(G(z)): 0.4676 / 0.3557\n",
"[100/100][343/391] Loss_D: 2.6612 Loss_G: 2.7681 D(x): 0.6808 D(G(z)): 0.3627 / 0.4446\n",
"[100/100][344/391] Loss_D: 3.2541 Loss_G: 4.9652 D(x): 0.5810 D(G(z)): 0.4422 / 0.2172\n",
"[100/100][345/391] Loss_D: 2.3038 Loss_G: 3.4191 D(x): 0.7031 D(G(z)): 0.2994 / 0.3594\n",
"[100/100][346/391] Loss_D: 2.7914 Loss_G: 2.8519 D(x): 0.6309 D(G(z)): 0.3905 / 0.4160\n",
"[100/100][347/391] Loss_D: 2.7598 Loss_G: 2.9866 D(x): 0.7325 D(G(z)): 0.4332 / 0.4062\n",
"[100/100][348/391] Loss_D: 2.6144 Loss_G: 3.9635 D(x): 0.6446 D(G(z)): 0.3507 / 0.3087\n",
"[100/100][349/391] Loss_D: 3.0911 Loss_G: 2.5143 D(x): 0.5804 D(G(z)): 0.4064 / 0.4670\n",
"[100/100][350/391] Loss_D: 2.5746 Loss_G: 1.9987 D(x): 0.7147 D(G(z)): 0.4378 / 0.5445\n",
"[100/100][351/391] Loss_D: 2.7581 Loss_G: 2.6362 D(x): 0.7592 D(G(z)): 0.4424 / 0.4489\n",
"[100/100][352/391] Loss_D: 2.8034 Loss_G: 2.1663 D(x): 0.6339 D(G(z)): 0.4284 / 0.5131\n",
"[100/100][353/391] Loss_D: 2.4298 Loss_G: 2.0544 D(x): 0.7137 D(G(z)): 0.3280 / 0.5279\n",
"[100/100][354/391] Loss_D: 2.5165 Loss_G: 2.8412 D(x): 0.7229 D(G(z)): 0.4278 / 0.4192\n",
"[100/100][355/391] Loss_D: 2.6573 Loss_G: 2.3075 D(x): 0.6838 D(G(z)): 0.4521 / 0.4907\n",
"[100/100][356/391] Loss_D: 2.7211 Loss_G: 3.2231 D(x): 0.7236 D(G(z)): 0.4667 / 0.3756\n",
"[100/100][357/391] Loss_D: 3.1393 Loss_G: 3.5680 D(x): 0.5352 D(G(z)): 0.3464 / 0.3385\n",
"[100/100][358/391] Loss_D: 3.1859 Loss_G: 2.9191 D(x): 0.5410 D(G(z)): 0.3774 / 0.4114\n",
"[100/100][359/391] Loss_D: 3.1919 Loss_G: 2.1779 D(x): 0.7273 D(G(z)): 0.5720 / 0.5225\n",
"[100/100][360/391] Loss_D: 2.9309 Loss_G: 2.7878 D(x): 0.6514 D(G(z)): 0.4384 / 0.4466\n",
"[100/100][361/391] Loss_D: 3.4801 Loss_G: 3.3842 D(x): 0.7176 D(G(z)): 0.4277 / 0.3706\n",
"[100/100][362/391] Loss_D: 2.7401 Loss_G: 3.3838 D(x): 0.7229 D(G(z)): 0.4014 / 0.3491\n",
"[100/100][363/391] Loss_D: 3.0241 Loss_G: 2.8220 D(x): 0.7606 D(G(z)): 0.5384 / 0.4267\n",
"[100/100][364/391] Loss_D: 2.6431 Loss_G: 3.8266 D(x): 0.7244 D(G(z)): 0.4183 / 0.3160\n",
"[100/100][365/391] Loss_D: 2.7717 Loss_G: 3.5561 D(x): 0.6467 D(G(z)): 0.3671 / 0.3412\n",
"[100/100][366/391] Loss_D: 3.0296 Loss_G: 2.7674 D(x): 0.5488 D(G(z)): 0.3622 / 0.4420\n",
"[100/100][367/391] Loss_D: 2.6218 Loss_G: 3.0929 D(x): 0.7032 D(G(z)): 0.3616 / 0.3877\n",
"[100/100][368/391] Loss_D: 2.6316 Loss_G: 3.2072 D(x): 0.6877 D(G(z)): 0.4004 / 0.3786\n",
"[100/100][369/391] Loss_D: 2.2341 Loss_G: 3.2685 D(x): 0.7605 D(G(z)): 0.3761 / 0.3780\n",
"[100/100][370/391] Loss_D: 2.7501 Loss_G: 3.4588 D(x): 0.6725 D(G(z)): 0.4408 / 0.3590\n",
"[100/100][371/391] Loss_D: 2.7761 Loss_G: 4.6703 D(x): 0.6561 D(G(z)): 0.4160 / 0.2474\n",
"[100/100][372/391] Loss_D: 2.7308 Loss_G: 3.4280 D(x): 0.6478 D(G(z)): 0.3890 / 0.3664\n",
"[100/100][373/391] Loss_D: 3.2333 Loss_G: 2.4757 D(x): 0.7294 D(G(z)): 0.5485 / 0.4578\n",
"[100/100][374/391] Loss_D: 2.8886 Loss_G: 3.0710 D(x): 0.5752 D(G(z)): 0.3146 / 0.3984\n",
"[100/100][375/391] Loss_D: 2.9752 Loss_G: 3.1183 D(x): 0.6718 D(G(z)): 0.4414 / 0.3854\n",
"[100/100][376/391] Loss_D: 2.5504 Loss_G: 3.8102 D(x): 0.7259 D(G(z)): 0.3447 / 0.3066\n",
"[100/100][377/391] Loss_D: 2.8814 Loss_G: 3.5837 D(x): 0.7254 D(G(z)): 0.4601 / 0.3344\n",
"[100/100][378/391] Loss_D: 2.6286 Loss_G: 3.1255 D(x): 0.6487 D(G(z)): 0.3877 / 0.3967\n",
"[100/100][379/391] Loss_D: 2.2463 Loss_G: 3.8835 D(x): 0.7403 D(G(z)): 0.3595 / 0.3125\n",
"[100/100][380/391] Loss_D: 2.3725 Loss_G: 3.0023 D(x): 0.7687 D(G(z)): 0.3869 / 0.4217\n",
"[100/100][381/391] Loss_D: 2.8461 Loss_G: 2.3684 D(x): 0.6747 D(G(z)): 0.4146 / 0.4744\n",
"[100/100][382/391] Loss_D: 2.3957 Loss_G: 3.7617 D(x): 0.6830 D(G(z)): 0.3486 / 0.3230\n",
"[100/100][383/391] Loss_D: 2.7056 Loss_G: 3.1843 D(x): 0.6907 D(G(z)): 0.4041 / 0.3740\n",
"[100/100][384/391] Loss_D: 3.1410 Loss_G: 3.0321 D(x): 0.6527 D(G(z)): 0.5234 / 0.4014\n",
"[100/100][385/391] Loss_D: 3.0333 Loss_G: 3.0557 D(x): 0.6145 D(G(z)): 0.4497 / 0.3948\n",
"[100/100][386/391] Loss_D: 2.7664 Loss_G: 3.0171 D(x): 0.7321 D(G(z)): 0.4167 / 0.3878\n",
"[100/100][387/391] Loss_D: 2.9515 Loss_G: 4.4019 D(x): 0.6776 D(G(z)): 0.4481 / 0.2593\n",
"[100/100][388/391] Loss_D: 2.7690 Loss_G: 3.2095 D(x): 0.6668 D(G(z)): 0.4365 / 0.3763\n",
"[100/100][389/391] Loss_D: 3.0740 Loss_G: 3.5476 D(x): 0.5859 D(G(z)): 0.3742 / 0.3446\n",
"[100/100][390/391] Loss_D: 2.5813 Loss_G: 3.7379 D(x): 0.6772 D(G(z)): 0.3707 / 0.3278\n",
"[100/100][391/391] Loss_D: 3.7127 Loss_G: 3.3112 D(x): 0.6844 D(G(z)): 0.4466 / 0.3768\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "rOj2KZiim1Bf"
},
"source": [
"# save models \n",
"# if your discriminator/generator are conditional you'll want to change the inputs here\n",
"torch.jit.save(torch.jit.trace(model_G, (fixed_noise, fixed_labels)), '/content/drive/MyDrive/icl_dl_cw2/GAN2/GAN_G_model.pth')\n",
"torch.jit.save(torch.jit.trace(model_D, (fake)), '/content/drive/MyDrive/icl_dl_cw2/GAN2/GAN_D_model.pth')"
],
"execution_count": 73,
"outputs": []
},
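{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of how the traced TorchScript checkpoints saved above could be reloaded for later sampling. It assumes the same Drive paths as the save cell and that `device` and `latent_vector_size` are still defined from earlier in the notebook; the `loaded_G` / `loaded_D` names are illustrative."
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Minimal sketch: reload the traced generator/discriminator saved above.\n",
"# Assumes the Drive paths from the save cell and that `device` and\n",
"# `latent_vector_size` are defined earlier in this notebook.\n",
"import torch\n",
"\n",
"loaded_G = torch.jit.load('/content/drive/MyDrive/icl_dl_cw2/GAN2/GAN_G_model.pth', map_location=device)\n",
"loaded_D = torch.jit.load('/content/drive/MyDrive/icl_dl_cw2/GAN2/GAN_D_model.pth', map_location=device)\n",
"loaded_G.eval()\n",
"loaded_D.eval()\n",
"\n",
"with torch.no_grad():\n",
"    # sample a small batch from the reloaded conditional generator and score it\n",
"    z = torch.randn(16, latent_vector_size, device=device)\n",
"    y = torch.randint(0, 10, (16,), dtype=torch.long, device=device)\n",
"    x_fake = loaded_G(z, y)\n",
"    d_out = loaded_D(x_fake)\n",
"print(x_fake.shape, d_out.shape)"
],
"execution_count": null,
"outputs": []
},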
{
"source": [
"## Part 2.1c: Results (10 Points)\n",
"This part is fairly open-ended, but not worth too much so do not go crazy. The table below shows examples of what are considered good samples. Level 3 and above will get you 10/10 points, level 2 will roughly get you 5/10 points and level 1 and below will get you 0/10 points.\n",
"\n",
"<table><tr>\n",
"<td> \n",
" <p align=\"center\" style=\"padding: 10px\">\n",
" <img alt=\"Forwarding\" src=\"https://drive.google.com/uc?id=1wQ2f10-A1Vs7k0LMfBPPyYTsPlkBF9QE\" width=\"%30\">\n",
" <br>\n",
" <em style=\"color: grey\">Level 1</em>\n",
" </p> \n",
"</td>\n",
"<td> \n",
" <p align=\"center\">\n",
" <img alt=\"Routing\" src=\"https://drive.google.com/uc?id=1wlDhX4hROET4s8Ndxn8nhj_0RLM2rnuG\" width=\"%30\">\n",
" <br>\n",
" <em style=\"color: grey\">Level 2</em>\n",
" </p> \n",
"</td>\n",
"<td> \n",
" <p align=\"center\">\n",
" <img alt=\"Routing\" src=\"https://drive.google.com/uc?id=1w9VrgfJLCRaTPhwoFVYdYhtCeaQmFHGb\" width=\"%30\">\n",
" <br>\n",
" <em style=\"color: grey\">Level 3</em>\n",
" </p> \n",
"</td>\n",
"</tr></table>"
],
"cell_type": "markdown",
"metadata": {
"id": "QOjxGURDINm7"
}
},
{
"cell_type": "markdown",
"metadata": {
"id": "5IQIKTdPZ-P5"
},
"source": [
"### Generator samples"
]
},
{
"cell_type": "code",
"metadata": {
"id": "VIHi0HrJZ-P8",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 955
},
"outputId": "97c5e032-d9ec-40ef-deea-9e796c599bb6"
},
"source": [
"input_noise = torch.randn(100, latent_vector_size, device=device)\n",
"# labels = [i for i in range(10)] *10\n",
"# labels = torch.Tensor(labels).long().to(device)\n",
"labels = torch.randint(0,10,(100,),dtype = torch.long,device = device)\n",
"with torch.no_grad():\n",
" # visualize the generated images\n",
" generated = model_G(input_noise, labels).cpu()\n",
" generated = make_grid(denorm(generated)[:100], nrow=10, padding=2, normalize=False, \n",
" range=None, scale_each=False, pad_value=0)\n",
" plt.figure(figsize=(8,8))\n",
" save_image(generated,'/content/drive/MyDrive/icl_dl_cw2/GAN2/Teaching30final.png')\n",
" show(generated) # note these are now class conditional images columns rep classes 1-10\n",
"\n",
"it = iter(loader_test)\n",
"sample_inputs, _ = next(it)\n",
"fixed_input = sample_inputs[0:64, :, :, :]\n",
"# visualize the original images of the last batch of the test set for comparison\n",
"img = make_grid(denorm(fixed_input), nrow=8, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
"plt.figure(figsize=(8,8))\n",
"show(img)"
],
"execution_count": 109,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAdsAAAHVCAYAAAC5cFFEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy92Y9tW5be9RuzWc1uojvdbbKtyqxyuVzlykoXJQvekCVbBiHxBPwBfkAgIV5AhgcQwhL+D1xYSLz5AQleKAnzAFJJIGwkSxblwkVWVmbeLu85J040u1lrzW7wMNfaETdBvkbUxVkohhQ3TkTsZu255hzfGN/4xriiqjzZkz3Zkz3Zkz3ZV2fmn/UFPNmTPdmTPdmT/f/dnsD2yZ7syZ7syZ7sK7YnsH2yJ3uyJ3uyJ/uK7Qlsn+zJnuzJnuzJvmJ7Atsne7Ine7Ine7Kv2J7A9sme7Mme7Mme7Cu2rwRsReQvi8g/FpEfiMi//1W8x5M92ZM92ZM92Z8Wkz/pPlsRscAfAn8J+Bj4+8C/rqr/6E/0jZ7syZ7syZ7syf6U2FeR2f5zwA9U9YeqGoC/A/wrX8H7PNmTPdmTPdmT/akw9xW85ofAR49+/hj47Z99kIj8NeCvzT9+/yu4jid7sid7sid7sv8v7a2qvvi/+8NXAbb/VKaqvwP8DoCI6L/5b/87/MZv/CaoYkRAFbQgBow1WGMAQVUxMr+InF7t9IMg8z+Vxwy5zv8VlUfPW54mX3igoohCVoUMqkqmYMTwn//t3+HN67f81X/5X0XJKFBKIeVMzAkjghWgFFIMpBSZwkQIE7kUNCUome3mnFfvf8jFxSWubUhZCWEk5wkjhqZpaJsW7ww5J3JRVBQtD59LRFDAiGCMICKICgVFRPg/fvBH/O2/9be42G7YrM948d7X6M+23O1u2B/uKNMR1Uwugpb6fC0FjRlnHK33OGfIKeKc0DWevutZNQ0UAVEUpSAUFY7THpsHtl5onaG3DecX56yuLhHb8JOf3vL6+pZN2/BnvvstfulX/ixN1/GHf/AH/Pe/9z/xj/74I64+OOPlN8/ZnrXY/UDZR2KCwVj2uRBLpl3D9txCyRwPBdP2+H7N/l1guJmQIjhryWGgxJGuFa4+OGd7dcb16z3xGFm3jrPOE3Yjx0Mg5EJWCKIkp5hWaNaWtrNYEQSh5LotW+v59Ae37G8Lv/nr3yfEQMwRrEOsJcRAyolSMqr1fjVNh2s7xECKgXE4EGPAGIMxjsZ7DJByIISJkhKo0nY9280G7zw5JaYwkVIGhaJ13xsjIAbnPMY6jDDvk0LbNuQU+Xt/73/hP/0bf4Nnzy8pFDCm3ruiCGDEgCx7CBA5nTkRqa+57DUxqADzZxMBY+bnI/NzHl7zZ07Y6dwJULTSawpoKVhj+Ot//T+As/f59m/9JYoRBFP/rlJfTwzMvxNTfYPIfPYBXT4HZT4n9axYY7AAlOpeVGe/IBRViio5198ZQLSuoRatvkPAovWsqUAp/Pgf/h4f/f7/zL/77/1HpFyYUiaETFJFjAWpn8tQUC3Vp4nOayZYazDG1n8bgxXBWq1rh9brEJD53/Veg2BAHu5HXfvZz6miWn2DFVv3XFbu7nf8J//xf8irs56VM6e1UqBgUDEUY8hqyLP/UwQjgjOWrrNcXvT80i++x9XlGU1jUVUOxyN//NFrPv7ohmFMAFgyxijOGKwR2rZhu+44267YbLa0fcdqtaFpekqGcZwwGLb9mu16Tdev+S//q/+a7B3/4r/0VzDG4cWxWq3YrLds+vW8ZtUPCcriGGX5EuZ1UFDIpcy4UO81Yihazz1iyLkQi5BVSKkQ87yXUZIWxmliGgZSDPXcSN1/IoYf/eHv83f/m78D8OOf3eqLfRVg+wnw9Uc/f23+3T/RfuN73+cv/+W/Uk8fIFqAjDHgvJs3XwVbZjCpOFkPoMyOZznVdd0VtB786hzkdOhldh48OqTMh29xkKqgucKJUr3K7/7uf0uIhT//vb9AKRGVuilzzoQcsNQDIKrkFMk5MgwDx+OeEAJxmtAUuby44pu/8F3ee/8D+vWGmBL393fsDzdoTlxdPOP585e03lVnHmO9ilI/p8y7SVGsGKy1NUhZPpyBrl8BsOo7Xr14yS//2q9y9fIlb69f8/rNJwz370jTwHEopLRsVCXHjBWh8w3WCjlHvBPa1tO3Hb33aFFKyXWjGw9q6aLQiedZbzhvW1rf8PLle7z48EPUdlys3/CH/mNWfctvfe/X+O2/+BdZbc45awy/97/+A0BoL1ou3rvk5fM121zgemCaCnfG8Pld4JgHVpeGl+81qCTurgP9+Tmrsyt++oMdb/d3kB1N48kWok20vfDs5Yav/8L79O0t+3dHrtqGl6uOe/YcZGSImYnCYDOhgebc0p05fCNINkimOnlnaLuGm+uRNCq/+J0/x/39W/bjgWIsxjmOw0CYRnKpwaIg9P2Gbn2GWMs47jju7wnTCCI462h8gzFKjGP9vYI1wnaz5dmzZzTeMxwH7u93TNMEQM71HFjnMaaCrfMtGKGkTIiB7aZHtTrA3/z+b/CdX/olxNR9M+8kVPV0BmQGNxFw1tVzZsxpbxkBqR8KmQ+RiHzhd7IAtDGn16yoc8Le+fvPvDeKGPjP/ubfJF99g6/9+j8PtoLrArYiBhWLMaaCKjPYmgoKJ3AXQUTna1aE+hnc4om1BhrLKuRSHXAqhZIVu0DX7G+06Pw682sqOITdu9d8/oN/wPd++19gGCcOx4kpZjLzfbEGI9T3JaOaMaIYY7AOnHMVZE0NEK0RrNH6sUVwSH281M9n5sWrwU0FZWPMaW1r0KKntbLGgFFiKrx+fY2IcLlqOGt9DTAQMIaMkLEkMUQVUjEkhVwUa+se3a4bXr0643u/+V1evbjEz+97GCfa1YppNFy/OyLW4E3BW7DW4MTS9S1nmzXPrracX5zTrVZstlv6bo0mOA5HjCqX6w0vnl/x3qv3+d3/4X9k6hp+9ft/jhwzne14/9XXePniPbbrLc4u66EYdAnzZix4cPIP/rxQVOeflVLhgURdr1RgShCLEpIyhro3rBGSZg7DwHg4EsNETnPkbQRjLc4If/dLMO6rANu/D3xXRL5NBdl/Dfg3vuxJpmRMqlFkLkpKIyVPuMbi7LqCJTCflBl0lwPO6ZBpqZnpab1PWe/8dx7AVR5H3bJEQHp6D5H6FloEVWpUPKNwzplyeunZucw3FiMYYxFvMM7V6KoA5UgKmUIhxsThcCDESFsyOSfGceTN528Y9nek9wPn23P6tsFZR8kFLblGx0vkLsuhWrJbM69LzVBLzgB0refFq1f8wne/y+WLK168d8nlJyvefPRjbq6vSdMRJM6vWaNYg0EMFeBFiEVJY2SYEs7WCLwURYzgfMIbS2OEddfRdZbGtzSdx7UNzjYU6/FOcFIQW+i3He26w3UeEVvXBygZUoAyNbTbNe5qwo0jU4z4IWNHg6il4BEr+JXiPZQwEA8Hp
v2REg259SCBmBXX1Pu56lrO+hWmUVq16GAIgyVMjlSESCajOGvorKcRi8lKnpSSKuhYHKIexGIcdJs17+6u2R9HQilY7wmhZq6lFJqmoWkaUq73XHJmGiMppnroSyGkTCkZEaWkCS0F7zxd17E5O2O93SII+8PINAamEBBjZrAFp4CxxFRwBaxrSCkTQqBJHjODTs4BLRmZM1YrJ/dUzxRzoDgfiiUDs4YKaAsAPQbVhWYSRcQ8OlszeJ7O2IwGstBNMv/u8RldDrDOqG5q5rwE1wIqWvc95eHKtXpNnYOEGpiXB+d7ymggSWXKlsC6fn9wwKIVUIsuznv2T3NQXkoFaUQqo4OhFOX2ds/9bsd+f6jZunO4xtO0DW3ToNacAg3lITt95LpmMDAPCYfMwZSp62we+7H5M2eUUsoX/dgpw1M0Z9IUCSEy7QcAvHO41pOLYgqUeR8opmbsS6Jyupum7hkRBMuq27Lqtkg2UITSRs42z/HNG5QBsKhpKKY60IyQgyHvM1OZuBsPd
"text/plain": [
"<Figure size 576x576 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAdsAAAHVCAYAAAC5cFFEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9S6xtW5Ke9UWMMeZca+3XOefmzcrMclF+FDSQgAYPiw5ulJAsOu4a3HDPAokmDTo0LdymaVm06dHBlmgAEiCQyi1kylkFxuByudKZ93HO2Y+11pxjjAgaMeba+17fysxK+VJZaEfmvGfvvR7zNeL1xx8xxd15lVd5lVd5lVd5lW9P9I/7AF7lVV7lVV7lVf7/Lq/O9lVe5VVe5VVe5VuWV2f7Kq/yKq/yKq/yLcurs32VV3mVV3mVV/mW5dXZvsqrvMqrvMqrfMvy6mxf5VVe5VVe5VW+ZflWnK2I/EUR+V0R+Qci8p9+G/t4lVd5lVd5lVf5kyLyz7vPVkQS8H8A/y7w+8DfBf59d//7/1x39Cqv8iqv8iqv8idEvo3M9t8C/oG7/0N3X4H/CvhL38J+XuVVXuVVXuVV/kRI/ha+81eBf/zi998H/vzX3yQifw34a+PXf/1bOI5XeZVXeZVXeZX/L+Vzd//0m174NpztzyXu/jeBvwkgIv7uuvDnfnCFOCAgIsjze2PDEREQYKDfPn6Q7T8u4JGwizgiNn6+fFt8yolt+5PI5Xd3wXm5m8vOMHd48frlHe6YgxHvMYd/+AdH5v0Vv/mbv3l5zy8iz8f+U8S/9uPXP/K1XYu8/MPzm79+jF/ft7vzW7/1W/z4xz/h3/gLfzH+Zk7vjd4a4ORcmKaJnBOqiiCIgEr8u/1+uX5u8boK+vLejxvQe8dxkiZEZXz++T1xzP6VcxYk7ok9338RRcb7/Rv+/sMf/n1+54c/5F/91/4VdrsDmmZUFRUlJQWEbo5Zw8yQ7d6PcxEEFRnn+c33zPEX92JbO3xtzcs/c//i657X2sv7s20qOg5mbG5YO/PZ55/x9377d7h7c828mxBVujutNVrrcX3N0SSknJGckJS+cgzWnb5U6nmJe1GUMhdy0jhXl7iVCKpCLkqZEykn5KXCiuAGvTuttti3wzwX9rsZFaGulYeHR8ydUgopKWZGWxsGiCaSJqwbX/zoC379O7f86rsbzq1yXFaezhV3p+TMYS7MKaEO4obbsCMqiMY9dbcX60RBwBy6Gd0Mc3ARRBNIwhxa79TWsG6Ie9z3cd983FNzG/dVL3bGPWxYEiWPa1N7o3UL2zJs048+PvA2C//SPpEVikIRSAIqjgokjU3VX1zfWExOnIOZYxY/++WGxv0woJvTL6+P19KEzwd8nrF1pZ2ONHcqQneQ3sGMtQldFNcESTGE5oaYoWPDDXux1v/J6njOfP9Xv3e5Vtt/tmW96dJFN7fjFgM87Po4Z7Nhy23YbZdxT33YZIszHf7jhSkf92PzL6Gbbv7sbzYncTk2/8p3u9t4D9S1bmvoH32T3sO342z/CfBrL37/U+NvP1X+3A+u+A//vT8DbqhATopqAsLY9t4w76Skl8V7OdntBgjgGawAoNpJqaLCRRHAMLexyBx3wVxDiUzoLnQTbLNnFycfN6U3o1s4ZnFHwr3S3Vm7sxqs3Wnm/M2//Y/57ve/z9/6W38Ls1Don8txfk1+5mf8q//6sH1bKCII4vL8PtmumQ0FlMuC/nmc7V/9q3+V/+6//x/4j/6z/wIRwVrl+PTA6fiE9c7NzS3v3r3j6uqKaSpkEVISctL4WZWsghLXz3olZ2cuadx3SBJG2905nc6YdeZ5JudMSokkimocu3nD6RfnG4qqWDdqXXGHlBI5Z1SV1hq9VXCn5BIORoS/8Z//dX7nh3+dv/JX/jKffu/Xma++w1T2zKmw2804yrk2zucn2npGpMc1HoFEEmXSTNZEVkWUy+ubdodxsK9cTxsGWIfj2gzN5nRlBBIijrOt+QhMVOO8Ss4kzQgZCOPntrA+/Zj/6X/+H/l7v/03+PU/8z0+/d4n6Dyz1MbD0yOPT0dOxzPejHk/sbvdo4c9lAlJAqq4O+1Yefz8I/c//hwXY3+749137zgcJpIIdKV3BYdpl7m+m7l5u2d/NYe+jnNWzViH01Pl8f7I8XjEEL7zyRt+7Vc/YU6J91984Hd/5/9iqY2buxt200SvnceHIx0hTTvmMlPPlf/lb/+v/Dv/8r/AX/o3f4Mf3z/we599yR988UB3uL3a86fe3fJ2npkMpDa8NUwczYqWjKjQe2dZVnDQnBARaneWWjnVxmJOF0XKjOY9S4fH88L94xPreUGaM2uiSLo4kIZRewQHmlM4AjNwyJqYy8TVfk/WxMfjE4/nhdrtklT8N//bD/mNfeI/+bUd1wVuZ7jOMIuTxZiSs5uE/QQ5GdAx8dBtZQQEcF5haVC7YCY4gmg442bOqXZO1Vk7dANPGZtuaW+/R333CfXje84//qc8WefelVMTSl1gXXl/yhxlps47WsksopxrJbWVQ10pdUV6o4qFDXXnv37fsP3Mn/8L//YlUdqCYjdDGLqEkjSTUkEkDR1oIB3NnZw7SKVWozenNaU3wVrGXUeg1Gm90q3i3sPxuuPhs0fg7BFImg8/E76md6P3hvc4Lvc4/gi4O30EqZtd//jlR5bz8lPN9LfhbP8u8C+KyJ8hnOxfBv6Dn/UhR+gAGO6GeAZXdDgKGRHzFrRv2ajIFr35JSEQIsoUsZFpRPSJ93DmbnSPSMhIEZ1JwkRprnQkDOAl/XVyEsTj+9CIFuOuReQUceJwbeO4NhERVH/x8vjPdLZb4DeirBHcsUW4l9K8XN7Ilps/5+jfvI9v2vf2t6lkVMDEaSXTcqJ5J2lcr5KFKUs4ToEkRtJETlA0MsC4104SwgkPZxuOJIx0zkJrEgEVsSUNBw6RJXWz8Xo4YUFAnzNM1XDyIoICNrLrlBI5pZEZbueaQQqmO7ru6WnGZAZR0Ip5o7YVtz6i7Lh6Jo6rYwpNFU2xJiPiDdPSeosscuzPx/plBHSXNSwaqEBEefhmmvw54i85MU0zmmbSCCRCQXQ4W/BlAg0115xIcyHtd/RUKeuCqpKykqbM4XbP7uaA5ULfvkcFt4ES9I5jaBamubA/TOwPO9SFVgVfDfdOmoQ8J0hC3xxIj2uQsuCuw/n4uBZKSZndVJiSkhR66yynlZRO9KlHUtOdNE3kMuEkutVYjykhueA4c058+vaGlBLX+4mb/cQkgtZ4H5ooeQRCI+h0EZLKAEfiWNWcIoqlWIPNDG8doSLdkV5R7II4hCXYNC1shyKRkS8Vswhrc0rklClJScNO5JSYckZ1Q/AiMDlk+NO3sM+wLzClSOHcjLRprIUFch/JAw6qdIfanKWGs13N6T7Wj8ZqbDhLFZbm1AbNA8FobeXUP/D4dMaWE5xP1G5gAl1i3ZuQ8wxpzzFPvLfOsYVzvTHjQGThIh564Qw7zmWRb+jTZh/7J
WPctGXbbCQPBt7ovSJqaOro0DeRNlBPH9ktyLg/Igru43bbJXGSEbh6pMXj+/tIkzvitqXMX8lyGb5BgCyCp/TCdvzh8s/d2bp7E5H/GPhvgQT8l+7+2z/zcwT8Go6rxWI1wUlxcryEzIZSANuN6w6Ogo9N4JJS+BaZdGDLKgRHaSSqZ6olWldqV2oNA6hu6DiJnCJzEXQY2P4CWvBn2FM2+HrsRV4afP2FoOSfNxuWbXHGxeIFUDuOVV78WZ7fv13HP+K+y3CkHWEuSk0CHbI6JTklw5QGNMyA2yBgsSwXw9bFiCtrI6rdYGdAwii5PJ+fiKMS19zd6K3S20pKSsp5fN5hwNJOOHJNegkrlGdlyfqyLLAFYYqTMZnpOtFlIiVFUaTskDUMvVojW8CTIo4npUvGJY/9R9Yh4pvdiDUenj6gX4m1dAkix3UXjSDgGdpiIBAynPFELjtynkd2HlmNC4EvmuGpBMwHyFTQ3UTeT7gq03mm5CM2JXb7mau7A9P1gXMH73FAATl3v
"text/plain": [
"<Figure size 576x576 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "9tRtYjH_LbQ0"
},
"source": [
"## Part 2.1d: Engineering Choices (10 Points)\n",
"\n",
"Discuss the process you took to arrive at your final architecture. This should include:\n",
"\n",
"* Which empirically useful methods did you utilize\n",
"* What didn't work, what worked and what mattered most\n",
"* Are there any tricks you came across in the literature etc. which you suspect would be helpful here"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "HnFqwePhXeZ4"
},
"source": [
"**Your Answer**\n",
"\n",
"My final architecture is ACGAN, I'll talk about something I have tried to make the model much better.\n",
"\n",
"1. Flip some labels when training generator: real = fake, fake = real. It really worked, but have no idea why it works.\n",
"2. Using batchnorm.\n",
"3. Avoid sparse gradients: ReLU, MaxPool. So use leakyReLU and Conv2d + stride for downsampling, ConvTranspose2d + stride for upsampling.\n",
"4. Use soft and noisy labels. If it is real, then replace the label with a random number between 0.7 and 1.2, and if it is a fake sample, replace it with a random number between 0.0 and 0.3.\n",
"5. Use the ADAM optimizer.\n",
"6. Use dropouts.\n",
"7. Use an Embedding layer in conditional GANs."
]
},
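{
"cell_type": "markdown",
"metadata": {},
"source": [
"Below is a minimal, illustrative sketch of the label tricks from points 1 and 4 above (soft/noisy labels plus occasional flipping of the targets). It is not a copy of my training loop; `batch_size`, `real`, `flip_prob` and `device` are placeholder names rather than variables defined elsewhere in this notebook.\n",
"\n",
"```python\n",
"import torch\n",
"\n",
"def make_targets(batch_size, real, flip_prob=0.05, device='cpu'):\n",
"    # Soft labels: real targets in [0.7, 1.2), fake targets in [0.0, 0.3)\n",
"    if real:\n",
"        targets = 0.7 + 0.5 * torch.rand(batch_size, device=device)\n",
"    else:\n",
"        targets = 0.3 * torch.rand(batch_size, device=device)\n",
"    # Occasionally flip a target so the network sees noisy labels\n",
"    flip_mask = torch.rand(batch_size, device=device) < flip_prob\n",
"    targets[flip_mask] = (1.0 - targets[flip_mask]).clamp(0.0, 1.0)\n",
"    return targets\n",
"```"
]
},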
{
"cell_type": "markdown",
"metadata": {
"id": "DC6ndLP5Z-P-"
},
"source": [
"## Part 2.2: Understanding GAN Training (5 points)\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "zz6oy7ixZ-P_"
},
"source": [
"### Loss Curves\n",
"**Your task:**\n",
"\n",
"\n",
"Plot the losses curves for the discriminator $D$ and the generator $G$ as the training progresses and explain whether the produced curves are theoretically sensible and why this is (or not) the case (x-axis: epochs, y-axis: loss).\n",
"\n",
"Make sure that the version of the notebook you deliver includes these results."
]
},
{
"cell_type": "code",
"metadata": {
"id": "kxrUDHfBZ-QA",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 281
},
"outputId": "708a041d-eb0a-42b5-ee71-464fea9648bb"
},
"source": [
"# ANSWER FOR PART 2.2 IN THIS CELL*\r\n",
"import matplotlib.pyplot as plt\r\n",
"plt.plot(list(range(0, np.array(train_losses_D).shape[0])), np.array(train_losses_D), label='loss_D')\r\n",
"plt.plot(list(range(0, np.array(train_losses_G).shape[0])), np.array(train_losses_G), label='loss_G')\r\n",
"plt.legend()\r\n",
"plt.title('Train Losses')\r\n",
"plt.show()\r\n",
"\r\n",
"# file=open('/content/drive/MyDrive/icl_dl_cw2/GAN2/train_losses_D.txt','w')\r\n",
"# file.write(str(train_losses_D));\r\n",
"# file.close()\r\n",
"\r\n",
"# file=open('/content/drive/MyDrive/icl_dl_cw2/GAN2/train_losses_G.txt','w')\r\n",
"# file.write(str(train_losses_G));\r\n",
"# file.close()"
],
"execution_count": 110,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAWoAAAEICAYAAAB25L6yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3dd3hc1Zn48e+r0Uij3qtlWe4FF2xk3MCAaTYQSiBxSKGEElg2sCSbBEJ+YRPIkkA2G7IhEEPCQgBTTUKcpWMwJsZGNjbuuMqWXFSs3qU5vz/OyJaNi2TNeO7MvJ/n0SPNzJ173+srvzrznnPPEWMMSimlnCsq2AEopZQ6Nk3USinlcJqolVLK4TRRK6WUw2miVkoph9NErZRSDqeJWoUEEXldRK4NdhxKBYPoOGoVKCLS2ONhPNAGdPkef8cY8+xJimMHcKMx5p2TcTyl/C062AGo8GWMSez++VjJUkSijTGdJzM2pUKJlj7USSciZ4tImYj8SET2Ak+KSJqILBSRShGp8f1c0OM974vIjb6frxORJSLya9+220VkzgnEESsivxWR3b6v34pIrO+1TF8MtSKyX0Q+FJEo32s/EpFyEWkQkU0icq7v+SgRuUtEtopItYi8KCLpvtc8IvKM7/laEflERHL88M+pIoAmahUsuUA6MAi4Gfu7+KTvcSHQAvz+GO+fAmwCMoEHgT+JiPQxhnuAqcCpwATgdOAnvte+D5QBWUAO8GPAiMhI4F+BycaYJOBCYIfvPd8FLgfOAvKBGuAR32vXAinAQCADuMV3jkodlyZqFSxe4F5jTJsxpsUYU22MecUY02yMaQB+gU14R1NqjHncGNMFPAXkYRNqX3wD+LkxpsIYUwn8DPiW77UO3z4HGWM6jDEfGtuh0wXEAmNExG2M2WGM2ep7zy3APcaYMmNMG/AfwFUiEu3bXwYwzBjTZYxZYYyp72O8KkJpolbBUmmMae1+ICLxIvJHESkVkXpgMZAqIq6jvH9v9w/GmGbfj4lH2fZo8oHSHo9Lfc8BPARsAd4SkW0icpfvWFuAf8Mm4QoReV5Eut8zCHjVV9qoBTZgE3sO8BfgTeB5X5nlQRFx9zFeFaE0UatgOXy40feBkcAUY0wyMNP3fF/LGX2xG5tcuxX6nsMY02CM+b4xZghwKfC97lq0MeY5Y8wZvvca4Fe+9+8C5hhjUnt8eYwx5b5W+c+MMWOA6cAlwDUBPDcVRjRRK6dIwtZsa30dcPf6ef9uX4de91c0MB/4iYhkiUgm8FPgGQARuUREhvnq3nXYlrFXREaKyCxfp2OrL2av7xiPAb8QkUG+fWSJyGW+n88RkXG+Twj12FKIF6V6QRO1corfAnFAFfAx8Iaf9/9/2KTa/fUfwP1ACfAZsAZY6XsOYDjwDtAILAX+YIxZhK1P/9IX514gG7jb956Hgdew5ZIG33lM8b2WC7yMTdIbgA+w5RCljktveFFKKYfTFrVSSjmcJmqllHI4TdRKKeVwmqiVUsrhAjIpU2ZmpikqKgrErpVSKiytWLGiyhiTdaTXApKoi4qKKCkpCcSulVIqLIlI6dFe09KHUko5nCZqpZRyOE3USinlcLrCi1Iq4Do6OigrK6O1tfX4G4c5j8dDQUEBbnfvJ0/URK2UCriysjKSkpIoKiqi7+s7hA9jDNXV1ZSVlTF48OBev69XpQ8RuVNE1onIWhGZLyKeE45UKRVxWltbycjIiOgkDSAiZGRk9PmTxXETtYgMAG4Hio0xYwEX8LUTilIpFbEiPUl3O5F/h952JkYDcb45fOPxTa7uV8bABw/Clnf9vmullAplx03Uxphy4NfATmAPUGeMeevw7UTkZhEpEZGSysrKvkciAh/9Dra80/f3KqVUGOtN6SMNuAwYjF1PLkFEvnn4dsaYecaYYmNMcVbWEe+CPL64NGipObH3KqXUMSQm9nVJzb657rrrGDx4MBMmTGDEiBFcc801lJWV+WXfvSl9nAdsN8ZUGmM6gAXYNd/8Ly5FE7VSKmQ99NBDrF69mk2bNjFx4kRmzZpFe3t7v/fbm+F5O4GpIhKPXcLoXOzyRf4XlwYttQHZtVLKGX7293Ws313v132OyU/m3i+d0qttjTH88Ic/5PXXX0dE+MlPfsLcuXPZs2cPc+fOpb6+ns7OTh599FGmT5/ODTfcQElJCSLCt7/9be68887jHkNEuPPOO3n11Vd5/fXXueyyy/p1fsdN1MaYZSLyMnY9uU7gU2Bev456NHFpULExILtWSimABQsWsGrVKlavXk1VVRWTJ09m5syZPPfcc1x44YXcc889dHV10dzczKpVqygvL2ft2rUA1Nb2rSE5adIkNm7cGPhEDWCMuRf/rwr9RZ5ULX0oFeZ62/INlCVLlnD11VfjcrnIycnhrLPO4pNPPmHy5Ml8+9vfpqOjg8svv5xTTz2VIUOGsG3bNr773e9y8cUXc8EFF/TpWP5ak9ZZc33EpUFrrR2qp5RSJ9HMmTNZvHgxAwYM4LrrruPpp58mLS2N1atXc/bZZ/PYY49x44039mmfn376KaNHj+53bM5L1F3t0NEc7EiUUmHqzDPP5IUXXqCrq4vKykoWL17M6aefTmlpKTk5Odx0003ceOONrFy5kqqqKrxeL1deeSX3338/K1eu7NUxjDH87ne/Y8+ePcyePbvfMTtrro+4VPu9pRZiEoIbi1IqLF1xxRUsXbqUCRMmICI8+OCD5Obm8tRTT/HQQw/hdrtJTEzk6aefpry8nOuvvx6v1wvAAw88cMx9/+AHP+C+++6jubmZqVOnsmjRImJiYvods/irhtJTcXGxOaEVXtb/DV68Bm75CHLH+j0upVRwbNiwwS8lgHBxpH8PEVlhjCk+0vbOKn14ulvU2qGolFLdHFb6SLPfW3UstVLKmW677TY++uijQ5674447uP766wN2TGcmam1RK6Uc6pFHHjnpx3RW6SNOSx9KKXU4ZyXqmESIitbbyJVSqgdnJWoRnUFPKaUO46xEDXobuVJKHcZ5ibr7NnKllPKjQM9HDfCb3/yGUaNGMW7cOCZMmMD3vvc9Ojo6+r1fZyZqbVErpULMY489xltvvcXHH3/MmjVr+OSTT8jOzqalpaXf+3bW8DywIz8qNwQ7CqVUoLx+F+xd49995o6DOb/s1aaBmo/6F7/4BYsXLyY11Y5ei4mJ4a677vLL6TkwUadBS12wo1BKhalAzEddX19PY2MjgwcPDkjMzkvUnlRoq4OuTnA5LzylVD/1suUbKCdjPuo333yTH/3oR9TW1vLcc88xfXr/Vi90Zo0aoFVb1Uqpk6c/81EnJyeTmJjI9u3bAbjwwgtZtWoVY8eO9cuaiQ5O1DryQynlf4Gaj/ruu+/m1ltvPVAeMcbQ2trql5idV1vQ28iVUgEUqPmob731VpqampgyZQqxsbEkJiYyY8YMJk6c2O+YnTUfNcCu5fCn8+Ebr8Dw8/wbmFIqKHQ+6kOF9nzUoDPoKaXUYZxX+tDFA5RSD
qbzUcPBGrV2JioVVowxiEiww+i3/s5HfSLlZueVPlxuO92ptqiVChsej4fq6uoTSlLhxBhDdXU1Ho+nT+9zXosadL4PpcJMQUEBZWVlVFZWBjuUoPN4PBQUFPTpPcdN1CIyEnihx1NDgJ8aY37bt/D6IC5VFw9QKoy43e6A3V4dCY6bqI0xm4BTAUTEBZQDrwY0Kp2TWimlDuhrjfpcYKsxpjQQwRygpQ+llDqgr4n6a8D8I70gIjeLSImIlPS7DqWLByil1AG9TtQiEgNcCrx0pNeNMfOMMcXGmOKsrKz+RRXnK31EeA+xUkpB31rUc4CVxph9gQrmgLg06GqHjuaAH0oppZyuL4n6ao5S9vC7A7eRa/lDKaV6lahFJAE4H1gQ2HB89DZypZQ6oFc3vBhjmoCMAMdykE7MpJRSBzjvFnLQ+T6UUqoHhyZqbVErpVQ3hydqbVErpZQzE3VMIohLW9RKKYVTE7WI3kaulFI+zkzUALGJ0N4U7CiUUironJuo3Ql6Z6JSSuHoR
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "mZCUIHKkS0kF"
},
"source": [
"### Discussion\n",
"\n",
"Do your loss curves look sensible? What would you expect to see and why?"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "jYYOnd6YBN3k"
},
"source": [
"**YOUR ANSWER**\n",
"\n",
"The loss curves look sensible, at the beginning, loss_G dropped significantly and it is because the initial weights are easily optimized. After about 10 epochs, the loss of the two begins to fluctuate steadily, indicating that the two fight against each other. After 75 epochs,loss_G rises, indicating that the generation more difficult to generate images to deceive. When the generator learns to produce our images well, it's inevidable to make the job of the discriminator harder."
]
},
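{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, a sketch of the theory (not specific to my exact implementation): with the standard binary cross-entropy GAN losses\n",
"\n",
"$$\\mathcal{L}_D = -\\,\\mathbb{E}_{x}\\big[\\log D(x)\\big] - \\mathbb{E}_{z}\\big[\\log\\big(1 - D(G(z))\\big)\\big], \\qquad \\mathcal{L}_G = -\\,\\mathbb{E}_{z}\\big[\\log D(G(z))\\big],$$\n",
"\n",
"the theoretical equilibrium has $D(\\cdot) = 0.5$ everywhere, giving $\\mathcal{L}_D = 2\\log 2 \\approx 1.39$ and $\\mathcal{L}_G = \\log 2 \\approx 0.69$. The exact numbers differ here because the ACGAN objective adds an auxiliary classification term and the labels are smoothed, but stable oscillation around values of this order, rather than either loss collapsing to zero, is what theoretically sensible curves look like."
]
},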
{
"cell_type": "markdown",
"metadata": {
"id": "z_WLkdSNZ-QE"
},
"source": [
"## Part 2.3: Understanding Mode Collapse (5 points) \n",
"**Your task:** \n",
"\n",
"Based on the images created by your generator using the `fixed_noise` vector during training, provide a discussion on whether you noticed any mode collapse, what this behaviour may be attributed to, and explain what you did in order to cope with mode collapse."
]
},
{
"cell_type": "code",
"metadata": {
"id": "HiPei-I0FCGM"
},
"source": [
"# Any additional code"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "DpZuxPYUFedE"
},
"source": [
"### Discussion\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "SynUx_QV7olI"
},
"source": [
"**YOUR ANSWER**\n",
"\n",
"In fact, when ACGAN is not used, mode collapse is easy to happen, and the generated pictures are not diverse enough. After trying many methods, ACGAN was finally selected to get a better generator. \n",
"\n",
"Because generating similar or the same type of pictures, the generator can better fool the discriminator, but when ACGAN is used, because the label is added, it makes it better to generate different types of pictures."
]
},
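{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of how an ACGAN discriminator loss combines the adversarial (real/fake) term with the auxiliary classification term that encourages per-class diversity. This is illustrative only; `d_out`, `class_logits`, `real_targets` and `class_targets` are assumed names, not variables from my training code above.\n",
"\n",
"```python\n",
"import torch.nn as nn\n",
"\n",
"adv_criterion = nn.BCELoss()           # real vs. fake head\n",
"aux_criterion = nn.CrossEntropyLoss()  # class-prediction head\n",
"\n",
"def acgan_d_loss(d_out, class_logits, real_targets, class_targets):\n",
"    # Adversarial term: is the sample real or generated?\n",
"    adv_loss = adv_criterion(d_out, real_targets)\n",
"    # Auxiliary term: which class does the sample belong to?\n",
"    aux_loss = aux_criterion(class_logits, class_targets)\n",
"    return adv_loss + aux_loss\n",
"```"
]
},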
{
"cell_type": "markdown",
"metadata": {
"id": "Cud7gZw2M-0U"
},
"source": [
"\n",
"\n",
"# TA Test Cell\n",
"TAs will run this cell to ensure that your results are reproducible, and that your models have been defined suitably. \n",
"\n",
"<font color=\"blue\"> <b> Please provide the input and output transformations required to make your VAE and GANs work. If your GAN generator requires more than just noise as input, also specify this below (there are two marked cells for you to inspect) </b></font>\n"
]
},
{
"cell_type": "code",
"metadata": {
"id": "JesvipjEFbGx"
},
"source": [
"# If you want to run these tests yourself, change directory:\n",
"# !cd /content/drive/MyDrive/icl_dl_cw2/"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "QLlCRIn6m9ZS"
},
"source": [
"!pip install -q torch torchvision"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "FUmBbya2nQh9",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "fceada4f-cffe-4f04-d595-4a69d7d172df"
},
"source": [
"# Do not remove anything here\n",
"import os\n",
"import numpy as np\n",
"import torch\n",
"import torch.nn as nn\n",
"from torch.utils.data import DataLoader, sampler\n",
"from torchvision import datasets, transforms\n",
"from torchvision.utils import save_image, make_grid\n",
"import torch.nn.functional as F\n",
"import matplotlib.pyplot as plt \n",
"\n",
"show = lambda img: plt.imshow(np.transpose(img.cpu().numpy(), (1,2,0)))\n",
"\n",
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"\n",
"# Do not change this cell!\n",
"torch.backends.cudnn.deterministic = True\n",
"torch.manual_seed(0)"
],
"execution_count": 1,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"<torch._C.Generator at 0x287e7af8c90>"
]
},
"metadata": {},
"execution_count": 1
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "IFfBZsnTzQIU"
},
"source": [
"############# CHANGE THESE (COPY AND PASTE FROM YOUR OWN CODE) #############\n",
"vae_transform = transforms.Compose([\n",
" transforms.ToTensor(),\n",
"])\n",
"\n",
"def vae_denorm(x):\n",
" return x\n",
"\n",
"def gan_denorm(x):\n",
" x = 0.5 * (x + 1)\n",
" x = x.clamp(0, 1)\n",
" return x\n",
"\n",
"gan_latent_size = 100\n",
"\n",
"# If your generator requires something other than noise as input, please specify\n",
"# two cells down from here"
],
"execution_count": 2,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "5t0CVMCFyxgU",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 417,
"referenced_widgets": [
"208254968d00463da4fd6218773e4a83",
"c3361e8a6f02493ab6f599ecca41bb6d",
"a06338df87b349e2a79355b801ceb3a7",
"edb18eb99fc34c6b80b0176191df8a3b",
"1923dadc242949febc606e4652a0b4b5",
"ebb299f97f93402eb7acce9784955020",
"439c5b234eb84943b19eefcb5747a1f9",
"47023f26b6c643fe834a4a587a87f798",
"175bd5ac104e4246abbac3e8cf87cef9",
"84d0690198f74e638cd8e7b08e720705",
"8af14b2a15e946a4bbd0f903d113cbe5",
"8b5475fa86874f5cb311e0ff64cf8891",
"03564ee9e1a945129177cad0cf040a56",
"1516c0706ead46abb20a40eab3ff27e9",
"ccea99f12154455a99d1bc6e4d7816ff",
"4738f5763ee04734acf3306d1bf19850",
"34e4fecfa60643a9897e7125ec70cd9a",
"d2c1bd006aa54791a93257b6760221bc",
"ae63f0b072f2450d9751c4bc46803b00",
"4f40f22058744466902a762959895af0",
"8469cc2a05b94c8ebeb06e7c0ef48ed9",
"9a1b16b06f5840aaa188d0026dea345b",
"161d7f9f503942a5937e8099fe2c7953",
"b9fe179f1c0d4130942349edb1ee7023",
"d6e3a81c8fa04646ad977c2f712713bf",
"e86265fb7d4b4fb0abdb60a9d012741c",
"5a64e0bf3c23450aa585320262158182",
"c47f145ab37d4e959bf6ae4b1e1dfb37",
"21ee221a6b484ccdbf5047ba67d260a0",
"af8622099a214ca188b2053d50e85f6b",
"0e66434877534e09a9b1249c08d7f5a1",
"99ec63cc36a04a8ea6348f9dd78dc015"
]
},
"outputId": "f6d4e35c-4a24-40b4-e350-343c28da3f83"
},
"source": [
"# Load VAE Dataset\n",
"test_dat = datasets.MNIST(\"./data/\", train=False, transform=vae_transform, \n",
" download=True)\n",
"vae_loader_test = DataLoader(test_dat, batch_size=32, shuffle=False)"
],
"execution_count": 11,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "HTNsHqK-mh6U"
},
"source": [
"############# MODIFY IF NEEDED #############\n",
"vae_input, _ = next(iter(vae_loader_test))\n",
"\n",
"# If your generator is conditional, then please modify this input suitably\n",
"input_noise = torch.randn(100, gan_latent_size, device=device)\n",
"input_labels = [7] * 100\n",
"input_labels = torch.Tensor(input_labels).long().to(device)\n",
"# input_labels = torch.randint(0,10,(100,),dtype = torch.long, device = device) # use it to generate different types of pictures\n",
"gan_input = [input_noise, input_labels] # In case you want to provide a tuple, we wrap ours"
],
"execution_count": 9,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "EIg06rOqo3XA",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 450
},
"outputId": "e53634ea-0b79-4db2-9b47-db9acc720d94"
},
"source": [
"# VAE Tests\n",
"# TAs will change these paths as you will have provided the model files manually\n",
"\"\"\"To TAs, you should have been creating a folder with the student uid\n",
" And the .ipynb + models in the root. Then that path is './VAE_model.pth' etc.\n",
"\"\"\"\n",
"vae = torch.jit.load('./VAE_model.pth')\n",
"vae.eval()\n",
"\n",
"# Check if VAE is convolutional\n",
"\n",
"for module in vae.children():\n",
" for layer in module.children():\n",
" if \"Conv2d\" in layer.original_name:\n",
" print(\"Used Convs\")\n",
" break\n",
"\n",
"vae_in = make_grid(vae_denorm(vae_input), nrow=8, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
"plt.figure()\n",
"plt.axis('off')\n",
"show(vae_in)\n",
"\n",
"vae_test = vae(vae_input.to(device))[0].detach()\n",
"vae_reco = make_grid(vae_denorm(vae_test), nrow=8, padding=2, normalize=False,\n",
" range=None, scale_each=False, pad_value=0)\n",
"plt.figure()\n",
"plt.axis('off')\n",
"show(vae_reco)"
],
"execution_count": 5,
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"9920512it [00:20, 473306.83it/s] Used Convs\n",
"Used Convs\n",
"Used Convs\n",
"\n"
]
},
{
"output_type": "execute_result",
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x28785856908>"
]
},
"metadata": {},
"execution_count": 5
},
{
"output_type": "display_data",
"data": {
"text/plain": "<Figure size 432x288 with 1 Axes>",
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\r\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\r\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n<!-- Created with matplotlib (https://matplotlib.org/) -->\r\n<svg height=\"183.183471pt\" version=\"1.1\" viewBox=\"0 0 349.2 183.183471\" width=\"349.2pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\r\n <metadata>\r\n <rdf:RDF xmlns:cc=\"http://creativecommons.org/ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\r\n <cc:Work>\r\n <dc:type rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\"/>\r\n <dc:date>2021-02-23T14:04:37.484292</dc:date>\r\n <dc:format>image/svg+xml</dc:format>\r\n <dc:creator>\r\n <cc:Agent>\r\n <dc:title>Matplotlib v3.3.4, https://matplotlib.org/</dc:title>\r\n </cc:Agent>\r\n </dc:creator>\r\n </cc:Work>\r\n </rdf:RDF>\r\n </metadata>\r\n <defs>\r\n <style type=\"text/css\">*{stroke-linecap:butt;stroke-linejoin:round;}</style>\r\n </defs>\r\n <g id=\"figure_1\">\r\n <g id=\"patch_1\">\r\n <path d=\"M 0 183.183471 \r\nL 349.2 183.183471 \r\nL 349.2 0 \r\nL 0 0 \r\nz\r\n\" style=\"fill:none;\"/>\r\n </g>\r\n <g id=\"axes_1\">\r\n <g clip-path=\"url(#p46f253317c)\">\r\n <image height=\"169\" id=\"image212dc075d5\" transform=\"scale(1 -1)translate(0 -169)\" width=\"335\" x=\"7.2\" xlink:href=\"data:image/png;base64,\r\niVBORw0KGgoAAAANSUhEUgAAAU8AAACpCAYAAACrp7/ZAACD10lEQVR4nOz9d3hc5Zm4j9/TpdFIM6NRH/VerGJV927AAWNICCG08A2EFDaNhGQ3bTckn2TZVDaFhBA2QEJvptrYlrsty7Jk2eq99z6aXs7vD//mrAU2uGhGIqv7us512dLMnEdnznne532qBBBYYokllljispAutABLLLHEEh9HlpTnx4CAgACMRiPLly9HoVAgkUgWWqQllvg/z5Ly/BiQlJTEl770Jfbu3UtERAQKhWKhRVpiif/zLCnPjwkSiYTAwEB+9rOfkZ+fv9DiLLHE/3nkCy3AEh+N2WxmZmYGq9VKbm4u0dHRqNVqLBbLQou2xD8pEomEtLQ04uPjCQoKYnBwkDNnzmCz2RZatEWD35SnUqlELpcjk8mYnZ1FEJaC/JfK9PQ0o6OjjIyMEBsbS0REBMHBwYtWeapUKlQqFQ6HA4fDgcfj8bsMUqkUqVSKTCZDoVCgUChwuVzi4XQ6/S7TxwmpVEpJSQmbN28mJiaGqqoqxsbGGBkZEb/XjxMymQyVSoVarcZsNuNwOHC73Vf1mX5Tntdccw15eXmkpaXxta99DZPJ5K9Tf+yZnp6mtraWV155hYceeojQ0FBCQkIYHh5eaNEuyC233MInP/lJ9uzZw86dOxkcHPTr+dVqNSEhIcTExJCYmMjmzZspLS2loaGBmpoaamtrOXz4MAAej2dBlPtiRyaTUVxcTFlZGampqaxcuZJt27Zx9OhR3n33XXbt2vWxMoBSU1O55ZZb+Pa3v833v/993nvvPdra2q7qM/2iPKVSKaWlpaxYsQKNRoNc7h+dbTAYCA8PJzU1ldtuu43g4GCcTidnzpzh5MmTjI+PIwgCXV1dmEymRb0lGRoa4sSJE7jdbvLz8+nv76e1tXWhxfoACQkJ5OTkUFpaSmBgIJWVlYyOjuJyufxyfolEQnBwMIWFhXz3u98lMDCQ8PBwdDodkZGRFBYWMjMzw8TEBADHjx+nsrKSffv2LQplYDQaSU1N5ROf+ARGo5HKykreeOMNurq6/CqHIAiYTCacTidSqZTAwECSk5PR6XQsX76cgoIC3nrrLdrb2xftDuh8tmzZwooVK1AoFAwNDWG1Wq/6M/2ixZRKJYmJiYSHh9Pa2nrV5vKloFarycnJISsri7y8PK6//npCQkJwOBzExsYSFhbG+Pg4Ho+Hmpoa+vv7GR4eZmBgwOeyXQnT09N0dHQgCAIpKSlkZmYutEgXJCoqiqioKCIjI/F4PISFhREQEMDs7KzPzy2RSIiJiaGwsJB169axZs2aOb8PCQnBaDTO+ZlOpyMoKIiOjg76+voWfDsaHh5Odna2qDwBqqqq6O7u9qty93g8tLS00NvbS3h4OBKJhNDQUBISEkS5LBYLOp2O5uZmxsbGFrUFn5+fT1paGh6Ph76+Psxm81V/ps+Vp1QqRaPRYDQamZ2d5dFHH50Xrf9R54yPj+ezn/0sW7duJSkpSfydUqmkpKSEkpIS4NxNcvz4cWpra6murubpp5/G4/EsCivkfMxmM319fQBkZmYyNDSERCJZdHLGxMSg1WqRyWTExsZiNBoxGAx+UZ4KhYJNmzbx1a9+laKiIvHnF7pG3lzZVatWkZSUxMzMDH/7298YHR1dUCUQHR1NdnY2WVlZwLndU3x8PBUVFX79rt1uN6+++ip6vV58Xjdv3kxISAgKhYKVK1eSlpbG6dOnefzxx9m1axdWq3XR3Y9ecnNzSU1NxWQyUV9f//FQnkqlknXr1iGXyxkdHWVoaMinN2dwcDAJCQk8+eSTJCUlERIS8qGvl0gklJSUkJubyy233MKDDz7I//t//4+TJ0/S3t7uMzmvBEEQmJiYIDQ0lJSUFG6//XZef/31ebkR5ouBgQFmZmZExR4ZGUlYWBjd3d0+P7dSqaSsrIzQ0NA5PxcEAZfLxfT0NEFBQWLw0ktERAT/+q//ikwmo7y8nIqKCp/L+nHAarXy17/+lb///e/Aueu7efNmbrvtNjZs2IBOp2Pt2rUsX76c3bt38+KLL7Jnzx7sdvsCS/6/KBQKEhISCAw
MRCqVzmuBiV+27SqVisnJSYaGhnxq3oeGhpKXl8e1115LcnLyJflXJRKJ+DCp1WqCg4O5+eab0ev17Nq1i66urkWzmjqdTl566SWuv/56Ud7FVm3U3d1Nf38/U1NTaLVaoqOjiYiI8Pl55XI5wcHBc5SnIAhYrVY6Oztpbm7m4MGDpKSkoNfrCQoKAiAxMZGYmBjCw8NZtWoVY2NjNDU1MTU15XOZL0RMTAzJyckLcu73IwgCFotljk+zoqICq9VKXV0dW7duJTk5mdDQUFauXIlEIiE6Opq//OUvCyj1XBQKBWlpaahUqnl/jn2uPCUSCWq1WlSe09PTPjtXeHg4hYWFfOpTn0Kr1SKVXnoNgDe1RS6Xs3nzZpRKJePj47hcLiYmJrBarQvu03E6nbz++ussW7YMo9GITCZbUHkuxNDQEAMDA4yOjhISEkJYWNgHLEFfIJPJRD+390Fxu910dnZy/PhxDh48y
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAC3CAYAAACxII3nAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAACAk0lEQVR4nO29d3hcZ7Wo/+7pmqZRGfXeZcmSLMu2LNuxHZc0J04jpEFygRSSAPdAqAcO58C9F7iUCwTIIbQUSIhTHcex496Lmi1Zxeq9S6M+Gk3bvz/8m40dlzixZkaBeZ9HT2LNnpmlXda3vlUFURQJECBAgAC+QeZvAQIECBDgX4mA0g0QIEAAHxJQugECBAjgQwJKN0CAAAF8SEDpBggQIIAPCSjdAAECBPAhiiu9KAhCIJ8sQIAAAT4ioigKl3stYOkGCBAggA8JKN0AAQIE8CEBpRsgQIAAPiSgdAMECBDAhwSUboAAAQL4kCtmL/yrEBQUhCAICIKAQqFApVIhk51bj2ZmZnA6nbhcLmZnZ/0saYAAAT7pBJQucN999xEeHk5QUBBZWVmsWrWKiIgI3G43L730EmfPnqWhoYF3333X36IGCBDgE86/tNKNi4vjM5/5DLfffjtGoxGZTIZeryc0NBS5XI5MJmPjxo0sXLiQM2fOcPLkSUZHR3E6nf4W/SIEQSA6OprHHnuM2NhYtm7dyp49e5iZmfGbTGq1mnXr1rF48WKWLVtGe3s7+/fv58CBA4yMjPhNrgAB/Mm/rNJNSkqisLCQG2+8kdzcXDQaDW63G6vVytDQEC6XC41GQ0xMDAaDAYDU1FSqq6vnpdKVyWRERUVRXFxMYmIiJ06cQC6X+00eQRDQ6XQUFRVxww03sGzZMjo6OpicnKSpqcmvSlcul6PX68nNzaW7u5uOjo4PfY/JZEIURWw227xxMwUFBRESEkJeXh7d3d0MDQ0xMDDgs++Xy+UYDAaysrIYHx9nbGyM0dFRbDbbVX+GQqFAp9Mhl8sZHR3lX6G/979sIG3z5s08+eSTrFixArVajcvlwm6309HRwZ49e9ixYwcnTpzA4XBgNBpJTEzk+uuvlxTwfEMmk5GSkoJOp8NutzM8PIzL5fKrPOHh4SxdupSlS5ciiiKJiYksXLiQgoICv8kFoNVqSU9P59e//jW33XbbVb0nJyeHzMxMzGazl6W7emJiYli3bh1bt27ly1/+MsuXL/fpQqvX68nJyeHHP/4xjz76KKtXryY8PPyq3y8IAgaDgezsbPLy8lAofGMDymQy6ccf/MtZujKZjJycHFavXk1xcTEAbW1tvPnmm/z9739nZGSEyclJ3G43Wq2WLVu2kJmZiVwux2Qy+dV6vBJyuZwFCxag0Wjo6+tjx44dfrPI1Go1kZGR/PSnP/W7gr0UJpOJ1atXo9frr/p6lpSUkJCQgNPp5Omnn/brggZgNBpZv349//Zv/4ZCoeD+++8nMjKSnTt3+kQ2o9HIv//7v3PXXXcRGRnJxMQElZWV9Pb2XtX7FQoFISEhbNmyhejoaDo7O3n88cfp6urC4XB4RWa5XI5Op+PLX/4yycnJzM7O8uUvf9nnO9c5U7rZ2dmkpqYSERHB1NQUTqeTkZER2tvbmZ6eZnJycl5sywRBIDQ0FJ1OB8DAwAAvvvgihw8fprm5GZvNhtPpJDw8nPz8fMxmMxqNhvHxcRoaGubF3/BBgoKCiI6O5rrrrmNqaoqzZ8/6Tc5FixaRlJREZmYmCxcuxGg0XvC6yWQiPT2dFStWMDw8zOjoKIODgz6TTxAEjEYjeXl5qFSqq36fxWIhOTmZhIQE1Go1MzMzft0KJyQkkJSURExMDAD9/f309vZ6XYF4FNcDDzzAsmXLMBgMVFRU8MYbb1BRUYHb7b6qz4mOjmbFihVkZmbS2NhIWVkZFovFawtGSkoK6enplJSUsGHDBkJDQxkcHESj0WC1Wq9a7rlgTpSuTCZj6dKlrFixgvT0dEZGRnA4HHR0dHDq1CmGhobo7+9nYmLiqj7P7XbjdDoZHx/Hbrd75eYeHh6mpaWFrq4u3njjDTo6OpienpZeDw8PZ+3atURGRhIUFMTw8DCtra3zUunqdDri4uIoLCxk//791NXV+U2WkpISioqKyMnJITY29qItnNlsJicnB1EUaW1tpampifHxcZ+dV41GQ1hYGAsWLPhI/lmLxYLb7SYqKgq1Ws3s7KzfrF1BEMjMzCQxMRG9Xg9AU1MT9fX1Xle6KpWK8PBw7r77bjIyMpidnWX//v28++67V+2nVygUJCYmcsMNN6BSqaiqquLAgQOMjY15RWaFQkFeXh5r167lrrvuIiIiArlcjlKpJC4ujs7OTp8uonOmdJ966inS0tIusmxEUWR6epru7u6rPqnj4+N0dHTwpz/9ibq6OqxW61yICYDL5eLgwYPU1tZKq9zY2NhFK11kZCQ33ngjWq0WQbhsw6B5gcFgICkpCa1WS3V1NSdOnPCbLF/4whfIzc297A2ckpJCcnIymzZtYmBggAMHDvCLX/yC6upqnyix9PR0li5dSn5+Pr///e85e/bsVb3PYDAQFRVFcnIyQUFBTE9P+0XpCoKASqXi3nvvZenSpdLvT548yfHjx73+/SaTiZycHJYuXUpQUBDV1dX89re//UgKMz4+npKSEh544AF27NjBzp07OXr0qFfklclkREZG8tnPfvaimExYWBjf+973+NnPfkZjY+MFRpc3mROl63K5+NWvfkVSUhLBwcF0d3cTExOD2WwmNjaWnJwcoqKiiIyMZHR0lLCwsAt8aW63G7vdzvT0NKGhobjdblJTU+no6KC7u3tOla6HsbExZDIZLpfrIoV7/fXXc8MNN5CQkIBMJqO/v5/a2lpqa2s/UmTWV6Snp/OpT32KoaEhWlparioaP9dER0fz9a9/XbJuL6V0JycnsdvtuN1uzGYz4eHhbNy4kczMTNatW8fk5KTX5JPJZCQlJfHII4+wYsUKuru7eeWVV65a6S5evJjU1FSvyXe1qNVqUlNTyczMJCIiAlEUcblcnD171us7HIPBQEpKCtdddx1yuZzt27ezY8cORkdHr2oBUigUxMfH853vfIeSkhJcLheHDx+ms7PTa8+VRqPhvvvuIz09Ha1We8FrWq2WDRs2kJKSQlVVFXv37mXbtm3Mzs561eqdE6UriiKlpaW0trai1WoZGBjAbDYTEhJCREQETU1NREVFoVAo6OrqIjk5GaVSKb3f6XRitVoZHR3lU5/6FAaDAafTiVqt9pqVealtmEwmIzo6mlWrVrFkyRK0Wi12u53GxkZKS0sZGxvzewDlg3j8uQsWLGB2dpaJiQmfrdgeDAYDcXFxkvXzQaxWK11dXbS1tTE9PY0oimRkZJCamkpYWBh6vZ7MzEzJ1eANFAoFS5YsYeHChej1evbu3UtLS8uHfp8gCCiVSlJSUoiMjPT7rkej0ZCZmYnJZJKybvr6+hgZGfH6dc/NzaW4uJglS5YwMTFBVVUVZWVlV+3SUCqVJCUlsXDhQ
kJDQ2lubqayspKhoSGv+FSVSiUmk4mVK1cSHh6O2+1mYmICm82GwWBAr9cTFhaGTqdDr9fjdrvZt28fTqfTq26aOQukNTY20tjYeMnXgoODSUxMRKlU0tjYSE5ODmq1Wnrd4XAwNTWFxWJhzZo1qFQq7HY7fX19XotkXgqVSkVhYSE333wzBQUFyOVyhoeHOXbsGNu3b8dut/tMlqslLCyMuLg4EhISaGlpwW63+/ScCYJAXFwcubm5xMTEoFAoEEURt9st/XdgYIAdO3Zw9OhRScktX76chx56iKSkJARBYP369TidTmpqaub8hhcEgaCgIDZv3kx8fDw9PT08++yzVxW4kcvlhISEkJaWRkREhN+LOrRarbS4iaKI0+mksrLSa/7Q89mwYQMbN26kuLiYqqoqqqurqa2tvar3egJwubm5mM1mxsfH2bt3L0ePHvWaP1+r1UpGlFarlXLEh4eHSUtLIykpCaVSiVqtltItn3nmGWZnZ5mamvKKTOCjlLHx8XFqamqAc66E0
},
"metadata": {
"needs_background": "light"
}
},
{
"output_type": "display_data",
"data": {
"text/plain": "<Figure size 432x288 with 1 Axes>",
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\r\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\r\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n<!-- Created with matplotlib (https://matplotlib.org/) -->\r\n<svg height=\"183.183471pt\" version=\"1.1\" viewBox=\"0 0 349.2 183.183471\" width=\"349.2pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\r\n <metadata>\r\n <rdf:RDF xmlns:cc=\"http://creativecommons.org/ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\r\n <cc:Work>\r\n <dc:type rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\"/>\r\n <dc:date>2021-02-23T14:04:37.557100</dc:date>\r\n <dc:format>image/svg+xml</dc:format>\r\n <dc:creator>\r\n <cc:Agent>\r\n <dc:title>Matplotlib v3.3.4, https://matplotlib.org/</dc:title>\r\n </cc:Agent>\r\n </dc:creator>\r\n </cc:Work>\r\n </rdf:RDF>\r\n </metadata>\r\n <defs>\r\n <style type=\"text/css\">*{stroke-linecap:butt;stroke-linejoin:round;}</style>\r\n </defs>\r\n <g id=\"figure_1\">\r\n <g id=\"patch_1\">\r\n <path d=\"M 0 183.183471 \r\nL 349.2 183.183471 \r\nL 349.2 0 \r\nL 0 0 \r\nz\r\n\" style=\"fill:none;\"/>\r\n </g>\r\n <g id=\"axes_1\">\r\n <g clip-path=\"url(#pb6a78cd7a8)\">\r\n <image height=\"169\" id=\"imaged70f2f6057\" transform=\"scale(1 -1)translate(0 -169)\" width=\"335\" x=\"7.2\" xlink:href=\"data:image/png;base64,\r\niVBORw0KGgoAAAANSUhEUgAAAU8AAACpCAYAAACrp7/ZAACMLUlEQVR4nO39V5Cc6XUejj+dc85pYk8EMMgZ2OUGkituoEWKlGSxqKLLli2pZJev7FK5dGOrdOFwY7n8K7FKpExZshiWYlhyl+QG7iKHRZiAyaGnezrnnP8X+J+zPdhdLIDp7hlS/VShsMAC6Le/733Pe8JzniMA0EQPPfTQQw+PBeFuL6CHHnro4VcR4t1eQA+fDJFIBLFYDLH4/uuqVquo1+toNptoNBq7vLoeeviniZ7x3OMQCoXQaDQwmUywWq1oNpsIBoNIp9OoVCoolUo9A9pDD7uAnvHc42g2m6jX6+jr68PLL7+Mo0ePYmFhAdPT0/jhD3+IUCiESqWCZnNvpK4FAgH/915ZUw89dAJdM55CoZAPVqPR6B2sR0Sz2UStVkOtVkO9XofFYkEwGIREItlTRlMoFEIqlWJoaAhCoRCVSgVbW1solUqcYuhh70MgEEAkEkEqlcJgMEClUkEmk6HRaGBzcxPFYhG1Wq33PtEl4ykSiaBWqyESidBoNJDP51Gv13vh5iOiVqshm80iHA6jUqkgm80inU4jl8vtmWcoEomgUqlw9uxZSKVS5HI5XLp0CaFQiA9ctyEQCCAUCiEUflAXpYt7rzy3vQahUAiZTAatVouxsTG43W7o9XrU63VcuHABkUgE6XT6VzJdRM5buwx/x42nSCSC0+nEF77wBVitVkSjUXzve99DIpFgr6SToAMkEAj4R7PZ3PYDwId+3kuo1+uoVCooFAooFouIRCIIh8PbCke7CYFAAIlEAovFgq9+9atwOByoVCp444038P3vfx+Li4uIRCJdOWz0viUSCRQKBdxuNzQaDeRyOWq1GkKhENLpNOeM96IBoO9ABcJGo4FqtdqVzxYKhVCpVHC5XPjsZz+Lo0ePYmBgACqVCltbW7h9+zbee+89/OQnP0Emk9mzz/BBKBQKSKVSSKVSpNNp1Gq1Ha+7o8aTDpXX68XBgwdhs9mwsbEBqVTKRqxTnyuXy+FyudDX14fjx49Dr9dDKpVCJBKhUqkgGo0iHo8jGAwiEokgHo8jlUohl8vtCYPUimaziVKphHQ6jWazCalUCplMttvLYrRePCKRCDKZDFKpFBaLBTKZbFvKppMQCASQSqWYmJjA+Pg4zp49C6fTyaGnQCBAPp9HOp1GKBTCa6+9huXlZUQiEZRKpV1/57Rv9Xo9Dh06hOHhYeTzeQQCAVy6dAnFYrHjzkaj0UCxWEQsFsPS0hJsNht0Oh00Gg2cTicUCgX6+vowOjqKX/7yl1hcXITf7++acX9cCIVC6PV6/NEf/REsFguSyST++q//GrFYDMVicUfvvOOep1gsRn9/PxwOB3Q6HQKBAIfsndislK+ZmJjA/v37MTY2hlOnTsFoNEIqlXI+LhqNIhaLIRAIIBAIwO/3IxAIYHl5Gel0mr26vYBms4lyuYxMJgOJRAKlUgmFQrHby9oG8o7q9TobMaPRyN5TNyAQCKDVarFv3z6cOnUKzz77LEwmE6RSKVO9arUaisUiotEoKpUKbDYblpaWsLi4yOmk3TKiAoEACoUCDocDTz31FEZGRhCLxTA3N4dr16515QJqNBqoVCrIZDJYXl6GXq9Ho9GAUCiEy+WCxWKBXq+HWq2GUCiE0WjE9evX4fP5UK1Wd/0CehBSqRTDw8N46qmn4HA4EAwG8fd///cQCoU7XmtXPM+JiQnodDpUq1Vsbm6yu9/uBy0QCCCTyWC1WvHVr34VZ86cwcDAANRqNcRi8bacx9jYGOr1OqrVKpLJJDY2NrCwsIBXX30Vs7OzSCaTKBaLeyYkKZVKiMfjUKvVMBqN0Ol0u72kbajX68jn83ybkxEgj6/TEAgEEIvFcLlcePrpp3H27Fm43W6OcBqNBur1OkQiETQaDbRaLf7oj/4IKysruHXrFv7X//pfWFpaQqFQ2JX8LHB/X+r1eni9Xnzxi1+EUqnE2toacrncthRTp9dQrVaRzWZx69YtbG5u4ubNmwgGg/jc5z4Hm80GlUqFAwcOYHh4GMvLy/jOd76Db33rW+x07CWoVCq
88MILGB4e5ou0Uqm0xTHqqPFsNpvsDYjFYsTjcczOznYs/JBKpRgdHcXzzz+PF198ERaLBQqF4kNFAwCcU5JKpZDL5TCbzZicnMTJkyfx5ptv4vLly/jlL3+JRCKxJ9gB+XweoVAIAoEAXq8X8Xi8K0bpUVGr1ZBOp7G0tASr1QqTyQSHwwG9Xg+5XN7x5ycUCiGXy3Ho0CG4XC6oVCrUajWEw2FsbGxgc3MT6XQa/f39cLvd8Hq9UKlUGB4ehs1mg9frxX//7/8d09PT2NjY2JWogzxPnU4HuVwO4P57j0QiXTXotN9zuRxKpRKi0ShWV1dx4cIFjI2N4cCBA/j0pz8NjUaDiYkJ/Pt//+8xODiIH//4x7h48SLy+XzX1vowUBrk2LFjUKvVfInm8/m2GPmOe56U/K7X6ygUCh0tHKhUKgwMDOD06dMwmUyQy+UQiUQAPlwQIsNDHotIJIJEIoFYLMaZM2fY6
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAC3CAYAAACxII3nAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAACLeElEQVR4nO29V4yc6ZUe/FTOuaqrqqs658ycOVnSWHlWu/IGy+s1bNiAvYBh+MLwhe9s+MaADXixsPdi/5VWG2a1WmtGYUaaGU0gOcxks5sdq1N1deWcc/0X9DnzNUVyQldVc6R6AEIih+x66/ve97wnPOc5okajgQ466KCDDtoD8WEvoIMOOujgNwkdo9tBBx100EZ0jG4HHXTQQRvRMboddNBBB21Ex+h20EEHHbQRHaPbQQcddNBGSJ/0H0UiUYdP1kEHHXTwKdFoNESP+28dT7eDDjrooI3oGN0OOuiggzaiY3Q76KCDDtqIjtHtoIMOOmgjOkb3CRCJRBCJHpsP76CDDjr41Hgie+E3AWKxGDKZDBKJBGKxmP+MDG61Wt33q4NPDnqGYrEYYrEY9Xod9XodjUYDHaGlDn5T8RtrdCUSCWQyGbRaLZ555hm43W5YrVZUKhWYTCaoVCpIJBIsLS1hfX0d29vb8Hg8qFQqqNfrh738px4ikQhyuRxmsxlGoxF2ux3RaBTJZBLpdBq5XI4N8OcJwsjn87b2Dp4OiJ60cX4deboikQgqlQp2ux09PT04deoUzp8/D6vVCo1Gw55ZvV5HqVRCLBZDNBrFzs4OfvCDH2BzcxP5fP6pMrwSiQRKpRJHjhzB0NAQGo0Gbty4gc3NTZTL5bavRyQSQSKRwGw24+LFi5icnMTw8DB8Ph9WVlawvLyMlZUVFItF1Gq1QzNeIpHoE3+2WCyGQqGASqWCWq1GsVhEsVhELpc7lPVLpVJYrVZ0d3fDYrFgY2MDkUgEmUym7Wv5POHhdGGr3t2TeLq/UZ4uGQODwYDBwUFMTk7i7NmzmJqaglqthlQqRaPRQDabRblchkgkgtPphMlkglarxa1btxAMBlEqlZ4qo0se5dmzZzE6OopCoYCtrS14vV5UKpW2GwVK2TgcDkxPT+PYsWPo7++HxWJBvV5HMpnE9vY2KpUKarVaW9dGl6pCoQAATht93PuUy+UwmUyw2+2wWq3w+XyIRqPI5/OHYnSVSiV6enpw8uRJdHd3QyqVQiQSoVAotDwNJhaLIZfLIZfL+XPpXdZqNVSr1X0ppEelkyj1RP+eLkC6hB/+9weF8POkUimnEkulEmq1WlvP82+c0ZXJZOju7saJEydw/PhxzM7OQq/Xo1KpIJ1Oo1AoYGdnB6VSCWq1GhaLBTKZDGazGTMzM1haWkI2m+WN9TRAp9NhcHAQ3/zmN6HX67GzswOVSsWbq92GTSqVQqPRYGZmBseOHcPMzAzUajVEIhEikQh2d3eh0WiQy+U+lbfZDIjFYqhUKthsNohEImQyGWQyGZRKpceuQyQSQavVor+/H0ePHsXw8DDeeustFAoFxOPxtq2dIBaLYTQacezYMfzu7/4unE4nG5FgMMi581aA0nIWiwV2ux0GgwESiQTxeBzZbBa5XA65XA7lcpnTR0JDCnxkAMkBkslkbLiLxSKq1SpqtRrK5TIajUZT9gg5Amq1GkajETKZDPV6HZFIBPl8/hNdvM3CgY2usML/8IN5WowSodFooFqtIpPJIB6PIxKJIBwOY2dnBzdv3sTly5ext7eHSqUCtVoNt9uNs2fPwmQyAQDi8Tjq9fpTxWgQiUSw2+04ffo0ent7+eYuFAqHErrTgRwZGcE3v/lNTE5OwmKxQCKRoFwuo6+vD5lMBsvLy8hkMm1Nf4hEIn6vzzzzDHvcOzs7iEQij72cxGIxbDYbZmZm8IUvfAFSqRRXr16FSCQ6tIinp6cHw8PDGBkZ4cJvpVJhY9cKSKVS6HQ6dHd348UXX0R/fz8MBgOy2Sxu374Nr9eLvb09AA+eGUVZUukDM0O2QqvVQqPRwGQyYWpqiqNLj8eDaDSKYrGIcrnMe+Oge1gkEkGpVKK3txfPP/88XnrpJSiVShSLRdy8eRNvvPEGdnd3kUgk2lIsP5DRJc9Rq9XC5XJBo9FwyEE50Xw+j2QyiXK5zF+IbpV6vc6FKWFVu5lhxcOg8Nbj8aDRaLC3sri4iHv37iGVSgEA1Go1SqUSnE4nstksRCIRvF7voRmzJ4Gev1KpRDKZRCwWQzAYbHtqQSQSQa/XY2xsDC+++CImJiZgNBr50FHE0NfXh6mpKQQCARQKBRQKhbasTywWQ6/Xw+FwYGRkBB6PB36//4nvkwyF0+nkVJPf70cqlUI2m23Luh+FoaEhuN1uqNVq5HI5+P1+7O3ttSSqoZSMUqmE3W7HzMwM5ubmoNfrUavVEAqFkE6n+RcZTXquQhYL2Yuuri709PSgr68P5XKZ9y2l9prJcpFIJHA6nZiamsLzzz+PyclJyGQyVCoVqFQqZDIZzM/PY2FhAalUquUX6YGNrlwuh9FoxMzMDOx2O4xGI5RKJarVKvL5PFKpFILBIIccjUZjXwhRLBZRqVQ4t0YFinK5/MSQ77Og0WigXq8jk8lge3sbhUIBuVyOvd29vT3U63WIxWJUq1VIpVIOHxuNBnZ3d5HP59serj8JIpEIGo0GTqcTUqkU6XQakUgE0Wi07SkQsVgMu92OqakpXLx4EW63m0PHWq0GsVgMnU4Hl8uF2dlZrKys8L5oxzOVSCSwWq182AOBAHuIT3pOEokEPT09cDqdUKlUCAaDiMVih1ZEE4vFGB4ehsvlgkwmQ6FQgN/vRyAQaLrBIGMpkUig1+vhcrkwPT2NwcFB5HI5hEIh7Ozs8DNJp9P7vO1Go8GUQeDBszSZTOju7sbIyAhcLhcymQwajQaUSiX/vWbtB0onDQ0NYW5uDidOnIDJZOJiuUajQSQSQbVahd/vRzabbTml8UBGlxLqer0eR44cwdzcHAYGBqDX6/mhU+hDuR0yqsLEe6VSQS6XQywWw8rKCu7fv4/V1VUsLy833atsNBrMSqjValAqlZzLBT4qtqlUKlitVphMJshkMqTTaQQCAc7/PC2QSCSwWCwYGxtDo9GA1+uFx+NBoVBou8FVKpV44YUX8MILL2BmZgYKhYJzc5T2UCqVcDqd+OIXv4h6vY5Lly7hZz/7GdLpdEvXKxaLoVarcfr0aZw9exYDAwO4ffs26vU6OwOPAuUdL168iImJCUilUly5cgWBQADFYrFl630c6HucOXMGIyMjqNfr8Hq9iEajyOVyLfk8qVQKtVqNsbExHDt2DKdOnYJSqcTNmzdx/fp1vPvuu0in0xzBPvwshbZAqVRidnYWs7OzmJmZgUqlgsfjQTweRyAQQDQaRaFQaEqURoWzwcFBfPvb38a5c+c4l09GndYDAJlMhp9jpVI50Gc/CQcyuvV6Hfl8HoFAAFevXuVbymq1csMBNR0IK4QSiQRyuZxvG/pZbrcbY2NjmJqawvz8PIdxzTZyZ
HhTqRS2trbQaDQ41BCLxRgbG8PQ0BBmZ2fhcDiwvLzMBvdpSy2oVCrodDro9XqUSiX4fD54vd62rpMiHpfLhbNnz2J8fBwKhQLVahXJZBLZbBaZTAYajQZqtZqLGefPn4fRaIRcLsdrr73GBcpWrE8qlWJ2dhYnTpzAyMgIcrkclpaW4Pf7H5tXFolEUCgU6O3tRV9fH/R6PZLJJNbW1lp+STwOCoUC/f396OrqglqtRrlcxu3btxGPx5seLQgr/mq1Gna7naPZdDqNnZ0deDweZLNZNpKPeybkcQ4PD+Ps2bMYGxuDzWaD3+/H2toa7ty5g2Aw2NSillgshkajwfPPP4/x8XFYrVaIRCLkcjl2/CQSCUQiEQwGA/r7+2EymTjqbtX7PZDRpcIUJcHlcjni8Tjn8YRVSsrdSiQSKBQKKBQKKJVKfqE6nQ42mw12ux3VahXpdJrzLc0GrbtUKiGdTkMmk0EsFsNgMMBsNuPo0aMYGRnB4OAgh+qBQOCpbIwwmUzQ6/WQSqUoF
},
"metadata": {
"needs_background": "light"
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "HoJLGFZSo7ON",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 500
},
"outputId": "ecd6cfb0-7794-4777-f4e6-dd697da97096"
},
"source": [
"# GAN Tests\n",
"model_G = torch.jit.load('./GAN_G_model.pth')\n",
"model_D = torch.jit.load('./GAN_D_model.pth')\n",
"[model.eval() for model in (model_G, model_D)] \n",
"\n",
"# Check that GAN doesn't have too many parameters\n",
"num_param = sum(p.numel() for p in [*model_G.parameters(),*model_D.parameters()])\n",
"\n",
"print(f\"Number of Parameters is {num_param} which is\", \"ok\" if num_param<25E+6 else \"not ok\")\n",
"\n",
"# visualize the generated images\n",
"generated = model_G(*gan_input).cpu()\n",
"generated = make_grid(gan_denorm(generated)[:100].detach(), nrow=10, padding=2, normalize=False, \n",
" range=None, scale_each=False, pad_value=0)\n",
"plt.figure(figsize=(8,8))\n",
"plt.axis('off')\n",
"show(generated)"
],
"execution_count": 10,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Number of Parameters is 6433512 which is ok\n"
]
},
{
"output_type": "execute_result",
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x287d10a3688>"
]
},
"metadata": {},
"execution_count": 10
},
{
"output_type": "display_data",
"data": {
"text/plain": "<Figure size 576x576 with 1 Axes>",
"image/svg+xml": "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\r\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\r\n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n<!-- Created with matplotlib (https://matplotlib.org/) -->\r\n<svg height=\"449.28pt\" version=\"1.1\" viewBox=\"0 0 449.28 449.28\" width=\"449.28pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\r\n <metadata>\r\n <rdf:RDF xmlns:cc=\"http://creativecommons.org/ns#\" xmlns:dc=\"http://purl.org/dc/elements/1.1/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\r\n <cc:Work>\r\n <dc:type rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\"/>\r\n <dc:date>2021-02-23T14:05:35.093015</dc:date>\r\n <dc:format>image/svg+xml</dc:format>\r\n <dc:creator>\r\n <cc:Agent>\r\n <dc:title>Matplotlib v3.3.4, https://matplotlib.org/</dc:title>\r\n </cc:Agent>\r\n </dc:creator>\r\n </cc:Work>\r\n </rdf:RDF>\r\n </metadata>\r\n <defs>\r\n <style type=\"text/css\">*{stroke-linecap:butt;stroke-linejoin:round;}</style>\r\n </defs>\r\n <g id=\"figure_1\">\r\n <g id=\"patch_1\">\r\n <path d=\"M 0 449.28 \r\nL 449.28 449.28 \r\nL 449.28 0 \r\nL 0 0 \r\nz\r\n\" style=\"fill:none;\"/>\r\n </g>\r\n <g id=\"axes_1\">\r\n <g clip-path=\"url(#p25c0f140d8)\">\r\n <image height=\"435\" id=\"image3723462847\" transform=\"scale(1 -1)translate(0 -435)\" width=\"435\" x=\"7.2\" xlink:href=\"data:image/png;base64,\r\niVBORw0KGgoAAAANSUhEUgAAAbMAAAGzCAYAAACl7fmHAAEAAElEQVR4nOz9Sa+t2ZamCT1jVl+x1trFsWPFveE3PJzwzIhQAhkoUYJoISToIEEjER0a8A/4GbQQEuJH0AMpQXRSIMhEdEAkEUEWER6R7n79ml2zU+2911pfMUsaY651zH8CUmzp6No1O2eftb9vzlG8433fIUDjX3/9669//fWvv/7117/++v/jLwfwP/jv/3f4N//NPwNAaLRSqaXSqAgAjdYqpTZygVwyKWdKzpRaoYK1FmsNRr8DIqK/jGANtJ4zRfrf0SqtNahCEwEEaNBuubWBVCoNaiO3xv/tP/onWBz/w//uf5tWGoJg+icUEYxz/fsDTaCVX33Pov9YG2IEEf2kIg1pjVYrKUZy/9lruWX5/jeIft5Gu31SxBiMsbQGIo3Wf14x8Jc/f+B/9x/8h/xP/uf/Hsnv7HkDEaQ5aunPuFZKzdzriSZQwWBwYmlVKLlgjTAOgSGMGCOU1ogpseUMCMboL2kgVFpptNwgC1IN1nhsszixOOewzpLrxnQIPD+9YxhO/C//F/9rfvv3Zn73548gBWsFWwXTDCL63JoR/bnFIEaoQEP0uTT9KWortAbGGFqDUvXNWxGst/pajSO4gLf6/9OeybmAQCmVInr2Go3az5YzjlYgpcpf/LMfyQv8z/6n/2NKLfez1SrU2p+j0M+gfn5aP3O10mqhlkKrhdYStab7WWk0mjRAEGn6bqzBiKEWiLHQmjBOE+M4I2LIOVNroVR9pzkXcinUWvj48YV//9//v/Dv/Y/+G4wPA6VVmjT9rK3qZ7ud/4beiVb1768gTRA8EIh7ZlsipIiXgpWCMQ2cIF5u3wLvLOMQcIPHiNAQahViKjRjoBlak34ZhVYrUgHj+T/9H/4ffPv9O/7RP/4v6RmwesduN1SEfn/0l/7MhVIKuWSoTZ95AyrUXPrPpGenUfW/9XME+n62dWfdNjCG+TDjfYBmKLVRW+v3DcTqHcm18Zd/8Tf81b/4a/57/+6/jRGNA6Vmcq6kUki5knIhlULVS6p3tzXon7mUSgWqGAr691ELB2/4/mnmu29OHKZRf5baoGocqDRyrZSG3oPWSFnPfhOLiMEZQwieEBxbyfxv/4//If/uP/4HfP/uEWP02TpjMMZgreCd0XtsQBD9uXuc1GfcqE3u4bG2pveyCSJgpMcBDVgYEcQKxhjE3KK73gturx/p8bFCa0iD1OA/+L//M3LL/Fv/1m8xrmG83F4amH4ejOk5wyAi9/esD0HPeOv3yhqDtQaLoaZKWhM0gwseNzgqhVQzuVZyaWR91Br/+2dsNIwxeG/xzmIQUm78/IdX/ul//K80mf3ud9/xD//hn/VgXyh7IeekAaXUHqD0glVED0jO5JyoteDEEUJgGII+ODSY6S/BGvmaoMwtGRRq6RGwP2QNiq1fiEqTSm2V1jSB/pN/9i8JjPw3/2v/Nq3pyzIY/f1Gg570Qy216YNsFWoBKvKri9Fq07+1FqQVTSwp6+Eu7R6EaQY9PxoIMV9bWf353D3JYQzGGoy1zPNf8r+X/4h/+N/6++RxI+YNYyy1OEoCyY0KlJpoNVNrRarBFIvF4I2h5kZcEsbA8XRgng4006i16WUtVZO4MThnGZzHGkOLhRqFukONhZYNkg3BOLwPiDHs6cLD48y3v/2B08OR/83/KvDtn4z8+T9+pJWKx+KyQaqGsSaAhWaNHuIGpQr94VD7s60USk9mtepzBHDW4gfXL64n2AHvHA2IWyTtmYYQY6K0TJNKqYVaBG+9BrcKey58+OkLuxH+nf/6f4U97uS8a2GVG60KIhaxFmMNznh9T2iiqSXRSu6JLNPI1Bqh5Xsh1O4VUU+UvfiJqbEvhZxhOh6YDweMCNu26ecohVb0/eTaqDVxODgA/o3/8g+cvjlRit6DWjQJaHKwPdBqNqo1axK7P9+AqSOX8871dcXlyMkXmonklmlekNEiAqVVrLXM48A4Dlhne8HlSEkTM2KhmZ70hVorrYJxhv/r//n/xXe/eeIf/zt/jhWDtbaXLCBNC8FbTKMns1YrJWdyybRa0TBj9GfMRe9
a6+eIW6DTIFxLo7TK2/nK5brgguP5+YkQRlo15FwpTc+WsYKxmlpTrVzervz4X/yBf/C732qmo5JyIqXCnjJbzOwpkUrVzykaY2qplJIpSYuOAmQxVLHU2jCt8m6y/P0fnvjT3zxymCek9kTXC45SG7FkYq2UXkQt66ZxyTqMtQTvmaaRMAa2tCEi/Oa7Z/7en/ygxb8TnAgGMFYITgO+60mtaTbQ89KfWa09krZ+52ovvDC9qNWkdmskjLUYY8D0auSehOR+3jX6atKU/vP9P//Jv2RrO3/25+8QC85bMF+TYBODvSczvR+tn9dbc1BKpRUtUp01eGewxlL3xv6WoBjCNOKPliSRvSRiruTcSAVyzwetn7daK8YYhsEzDQEnhtxzyD/9j3tndj6/8enjz9QCOSfinshZK8sYI6VXzc45rHWUph+01oLQmMJIq5nWimZMo5WsMYIRQxEQ6f/eAqZRb3lM6++/V
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAcEAAAHBCAYAAAARuwDoAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ya9tW5beh/1mtapdnPre+96LeJGRycykSSUpypZg2YYbAgzZFOCe/wX/CW4Y7hlwS7BgqCn/A3bHHQPuGDBMwoJA0iLMTDGryChedatT7WoVs3RjzH3uS8FmGswEBIN3RdyI+945Z5+915pzjjG+8X3fUKUUPl+fr8/X5+vz9fn6N/HS/02/gc/X5+vz9fn6fH2+/pu6PgfBz9fn6/P1+fp8/Rt7fQ6Cn6/P1+fr8/X5+jf2+hwEP1+fr8/X5+vz9W/s9TkIfr4+X5+vz9fn69/Y63MQ/Hx9vj5fn6/P17+xl/1XfVEp9Vk/8fn6fH2+Pl+fr/+/vkop6v/b1/6VQRDgf/gf/Ef89OvfJedIiB6tC8s8ogpc37zi5uYGZQyzD+ScQQFKAQqtNCiF1gpnDNYYtFEoraAUUAql5O8KhaJAUfUlFChFKQUfIiknFJBRaKXIGXxOeO857p74R//X/zOm6fmP/mf/c4zWkCEl+Zl13+CcwmqDolAKlFLIuZAVFKVIqRB8QJEYWktrLVopiiqUnIgpI+8sQUlSQuuEKrWcrjW10vWT5IJ6KbQ1SmnqzeHP/uJP+M/+s/+U/8n/9D/AtZZCoZRMyvI7jDUYpcnJM86BlBVaOUpWpJQJPhKCfH8ukHKSO6M1Xe9wjaEAOStCAOcsKs9MxwPTvGCalmG9oW1aSk6oklEkSsnklPBhIeeEsRqnDf/0P/8j/tZvf8nPfus1aE3JhRATMRWUAmcM5AwpUYqioAFNzpqYMjkXSiloVaAUckpoo2msxqpEjhFfHEkNGDfgTKH4PcFPpBgoKVO05vbmhle31ygyP7x9z9P+iCqZwTmcc3Su4b/69XcE0/G/+F/+r8g5ylpU1JUDhfOzz2QyyFuSz04i50IqiZwzISb205HTPFG0Zt2vsNpitUYr+aOUPGVFQVFIOROiJ8VAjAEfRorSFGPR2tE0a4xt0Mpw/8N7/vf/yX/Mz3/751jrZO1o2as/zj6N1p/+XS6kHM87m5yL7JX6E0oh+7B+2lJK/ZI6Hwb168i6y4WifnSbzpdSqCLrVSmFVfDd9z/wu3/rp/wP/rt/j6urSy4urmmHS7TrMcaCsugiaz1T0FqBVi+vm0uhfjzZH38px67vV8nnKhmKku87v9fz5yjUvajk85Qi31dyJhV5zv/oH/8/+Mf/93/Ef/y//U/Q5E9nDaBfzhZ55Vz3sNK67tDzGSavH1PieX/g4WlHVobXt9cYq6FkcslQkvw9yzqSvVfIpZBSIWXIJZHr8ypFfmepv+d0PPG/+9/8r1ltNrimRRuNPPLCj8E6rbWsV4q8y1Lfs5LVp+u5qhU42+BcDyh88MTkoUDOUdYECqXqveD8nDWpnNdMopBln6Qo90hplIL90yPXb77i7/yD/z5+GVnmPTlFNJr1MPDmzRvubu+wbc8cFbHIDgm5MM6RmAqNNfStobEGq3R9Dlk+m5LzXx5BkT8vX/vRuarkPeciZ10qhRKz/D0VSkp884t/wf/t//J/5F91/ZVB8Cdf/x6//3f/Pt6fOJ0OpBIYTwcM8PXXv81Pv/45ru+ZFk9KSW6UkY2glaFQMFrTGIO1Bms0aAkWWsv3qRpeeNmwBY1GK0gkFu+JOUAucqgmSKmwpMA0TTx8fMd/+U/+Me1wzb/33/sfY5UmAzFGrILtymGNBE/ZarluSsgKMLLg59MJ4szF0OFcg0aRSKQU66bVGDIqJzn0dMacF5JR9SCR18o5U4psPK1M3YAKg2G9WoFSfPH1HV3Too1BKwkYSilc43BWQ44cjjNLLIClREVOsPjEsgSWxRNzJqZILhFlYLvtWK1arLPEDPv9gjUGVRZO+5bTOOK6ge3lFV3TkUNE64TWmZIzMUa8XygloZ3CWIX5J4ar6xVf//SVPLOiSRlilEPAWo2KEsxSKuSsZWkpC2hSgsUvlByhZFKMmMay6Rwrm8khMOaemTXKDTQmg38iLydSWkgh4TPcvbrhZz/9CqMzJUasNuhS2LQt1jWsupZvPzwS+w3/o//wPyQmT6pBopDq5pbgl7ME45Ll3ycSqQRC8vjkySXjY2Y/7ngeT6Si2ay2tLbBKoPVBlMPTTkta2JUFDFFUgrEuDD7E0UXkjJgGtr2Ems6rHa8/dV3AGwvLxn6HrRGaQBFLqUeBJ+CSCnIYVDfc6kHbf0q50BYSn75gVIKOdV7cE7+Sq7/LwGx1ACuAG3qWlVagmCugcMU9Lv3rNcNr99subvbcHtzw7C9o11d0ndb2m6NUQ5dg6BC4VqLtRaFqQkinMOtNQpVD/acS/3X5yRV1SBf3+v5I9bAfg78WbIaOaxzlm/S8OHDB/7ZP/2n/MN/+A/R6tPPKM6Hq5IkoD4zCQhyyJ8TdDkjMuM488d/9mf84R/9S2zb8+/+u/8Ol5cXkiQXSR4lgZQgnIskQznXAJglUU3pnLgWcpHfr5Rmt3vmP9UK1/R0XY+xilo/ICeK/lFQOD/HLGenUihtAAmc54Kjbzq6dkWmME0T8zISYpT7VLIkcbUooCZ01GIgZUn0IZOSJISSjCjQoM2OfnXJF1//Ltkf8f5AzhmrDdfbDT//+id88eUX6H7LIVpSdmhjCKVwGD0xQttYVq2h1QqNliKgZFBF3pdBEqZzUCaTKaAzqhg5a+uZmimkpD/tiQTeR1TM6BL/qhD3VwdBqxUqRcI4cdztOU4HxvFI2zRcXZ8Y55neOaTyMy+V0DncSFaXyVqRMvIha5WYc0Hp8qny45zZFbSSCsUHzzzPxLQQg2eeFhbvSTHho2ccR54fPuLnkWG4oS0ag6ZoUNpQciT4SLE1E1TyHs43UCmF1pC9Zz49kZYjg71FaQg+Mo0TPgW6tqVpLFaBLDmpd/I5KcnyGcrLx9YvG7soqZhkIxZICQqMxxOpTRhjMcqSUsa5BqsVRWm0sjjb4FPE+0QKpf7mTC6RlCMheFLOkunpSCmWthkYhp6YC9M04peJ6BdC8qALWidyWlimRIoBa8BYOYhSSpKgYEHJgQmFHCNxWTDaYjBYZSRJyIriEzlFYkikmEkRUklYm3GuxShFSYnFB3IKqJTQqtBvB7Zdy3Sa8R4okZgmUkywLLjosSqjjWFJmcM487jbYbVif5hYJo/VhkkXVPLEUvAxoik1oAViqgdjyZSSgEShZuS5kFMkFU8skVgWfJpYgiemSEgJH2cKMynDtHhK7nG6IWuD1poYAykFSskYNFY3KGXlGZExRoEpaHWuNCdiKWQiIS6ydoxBO1sBFPVS3ela7SgEEZEDS9V1Xcg6o3KtZv/SrtU1YspiVKp8CpFaKrySMtRKOGc5ZKzWGGMw1tbipsjBI99JAfb7I3/xy
1/z/kPPavOOtl8xbC64vHjFdn1J43pJfougQv3QMQxrmm6gbzt0vW/WOkw/YLV5Ce4SnFU9A2o1W5MAVXgJ3ufXBsipBk95EamdiiR0CtD6Jba+BNGcz8Gvfr/S9cSV31mKHPqlFEKMPD7c84f/r3/Bf/5f/Bes1wO3ly2//3v/LdbrNfXQ+pTAZyAXVFEoKVxQCow2KLSsg5JJNbkoSsIxnAuD8+tpicUA+lz5qZcPc04nSlHy+2ragVI0xjK0jq7RhBQIOkv9nBIpZ7lV+owWgFJZgpsyL4jG+RwuuaBf1let1IESPHnaoUqkMwblHNY6nGugZFRcYB4Js6LYgabrcRqSKywUrE4ApKxkX9Tkh5rwaGr+VaDU5EaQAYWm1HulXpI7SoEsFbkio/IiCGP+GwiCvbWsXMeCo4TM8/0j+8Mzm82GcRo5niawUs0YVautc7A530gg5yQfpGSMlqgtwSjXQKRfFnrOBVUSISzsnh95fHrgeNwxjyPzNBH8TAiBGGe8X5jHiXk8o
},
"metadata": {
"needs_background": "light"
}
}
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
]
}