{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"object_detection.ipynb","provenance":[],"collapsed_sections":[],"toc_visible":true,"machine_shape":"hm","authorship_tag":"ABX9TyN7QVzgZeuKvTwxcckt6Px3"},"kernelspec":{"name":"python3","display_name":"Python 3"}},"cells":[{"cell_type":"code","metadata":{"id":"UalaVE2ossJE","colab_type":"code","outputId":"6e4236ee-785a-4ec6-d67c-2a41f917d665","executionInfo":{"status":"ok","timestamp":1592062126309,"user_tz":-540,"elapsed":24101,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":127}},"source":["# 드라이브 마운트\n","from google.colab import drive\n","drive.mount('/content/gdrive')"],"execution_count":1,"outputs":[{"output_type":"stream","text":["Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n","\n","Enter your authorization code:\n","··········\n","Mounted at /content/gdrive\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"SHfSGzBOP5Ug","colab_type":"text"},"source":["# 1단계. 
Tensorflow 설치"]},{"cell_type":"code","metadata":{"id":"EBXiBT8G8tQt","colab_type":"code","outputId":"d5613ab0-0f0e-49ab-bc76-2c1ff52cc48d","executionInfo":{"status":"ok","timestamp":1592062266924,"user_tz":-540,"elapsed":839,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["%tensorflow_version 1.x"],"execution_count":1,"outputs":[{"output_type":"stream","text":["TensorFlow 1.x selected.\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"cRXDOxIWzlgi","colab_type":"code","outputId":"1ac71a76-576f-4550-dd84-7122c4071d11","executionInfo":{"status":"ok","timestamp":1592062365151,"user_tz":-540,"elapsed":97169,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000}},"source":["# 설치 방법: https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/installation.md\n","\n","%cd /content/gdrive/My Drive/capstone\n","\n","#!pip install git+https://github.com/google-research/tf-slim\n","\n","# Dependencies\n","!apt-get install -qq protobuf-compiler python-pil python-lxml python-tk\n","!pip install -q Cython contextlib2 pillow lxml matplotlib tf-slim\n","\n","# Download the tensorflow/models repository\n","!git clone --quiet https://github.com/tensorflow/models.git\n","\n","# COCO API installation\n","!pip install -q pycocotools\n","\n","# Protobuf Compilation\n","%cd /content/gdrive/My Drive/capstone/models/research\n","!protoc object_detection/protos/*.proto --python_out=.\n","\n","# Manual protobuf-compiler installation and usage\n","!wget -O protobuf.zip https://github.com/google/protobuf/releases/download/v3.0.0/protoc-3.0.0-linux-x86_64.zip\n","!unzip protobuf.zip\n","%cd /content/gdrive/My Drive/capstone/models/research\n","!protoc object_detection/protos/*.proto --python_out=.\n","\n","# Add Libraries to 
PYTHONPATH\n","import os\n","os.environ['PYTHONPATH'] += ':/content/gdrive/My Drive/capstone/models/research/:/content/gdrive/My Drive/capstone/models/research/slim/'\n","\n","!source ~/.bashrc\n","\n","# Testing the Installation\n","!python /content/gdrive/My\\ Drive/capstone/models/research/object_detection/builders/model_builder_tf1_test.py\n","#!python /content/gdrive/My\\ Drive/capstone/models/research/object_detection/builders/model_builder_test.py"],"execution_count":2,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone\n","Selecting previously unselected package python-bs4.\n","(Reading database ... 144328 files and directories currently installed.)\n","Preparing to unpack .../0-python-bs4_4.6.0-1_all.deb ...\n","Unpacking python-bs4 (4.6.0-1) ...\n","Selecting previously unselected package python-pkg-resources.\n","Preparing to unpack .../1-python-pkg-resources_39.0.1-2_all.deb ...\n","Unpacking python-pkg-resources (39.0.1-2) ...\n","Selecting previously unselected package python-chardet.\n","Preparing to unpack .../2-python-chardet_3.0.4-1_all.deb ...\n","Unpacking python-chardet (3.0.4-1) ...\n","Selecting previously unselected package python-six.\n","Preparing to unpack .../3-python-six_1.11.0-2_all.deb ...\n","Unpacking python-six (1.11.0-2) ...\n","Selecting previously unselected package python-webencodings.\n","Preparing to unpack .../4-python-webencodings_0.5-2_all.deb ...\n","Unpacking python-webencodings (0.5-2) ...\n","Selecting previously unselected package python-html5lib.\n","Preparing to unpack .../5-python-html5lib_0.999999999-1_all.deb ...\n","Unpacking python-html5lib (0.999999999-1) ...\n","Selecting previously unselected package python-lxml:amd64.\n","Preparing to unpack .../6-python-lxml_4.2.1-1ubuntu0.1_amd64.deb ...\n","Unpacking python-lxml:amd64 (4.2.1-1ubuntu0.1) ...\n","Selecting previously unselected package python-olefile.\n","Preparing to unpack .../7-python-olefile_0.45.1-1_all.deb ...\n","Unpacking 
python-olefile (0.45.1-1) ...\n","Selecting previously unselected package python-pil:amd64.\n","Preparing to unpack .../8-python-pil_5.1.0-1ubuntu0.2_amd64.deb ...\n","Unpacking python-pil:amd64 (5.1.0-1ubuntu0.2) ...\n","Setting up python-pkg-resources (39.0.1-2) ...\n","Setting up python-six (1.11.0-2) ...\n","Setting up python-bs4 (4.6.0-1) ...\n","Setting up python-lxml:amd64 (4.2.1-1ubuntu0.1) ...\n","Setting up python-olefile (0.45.1-1) ...\n","Setting up python-pil:amd64 (5.1.0-1ubuntu0.2) ...\n","Setting up python-webencodings (0.5-2) ...\n","Setting up python-chardet (3.0.4-1) ...\n","Setting up python-html5lib (0.999999999-1) ...\n","Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n","fatal: destination path 'models' already exists and is not an empty directory.\n","/content/gdrive/My Drive/capstone/models/research\n","object_detection/protos/input_reader.proto: warning: Import object_detection/protos/image_resizer.proto but not used.\n","--2020-06-13 15:31:41--  https://github.com/google/protobuf/releases/download/v3.0.0/protoc-3.0.0-linux-x86_64.zip\n","Resolving github.com (github.com)... 140.82.112.4\n","Connecting to github.com (github.com)|140.82.112.4|:443... connected.\n","HTTP request sent, awaiting response... 301 Moved Permanently\n","Location: https://github.com/protocolbuffers/protobuf/releases/download/v3.0.0/protoc-3.0.0-linux-x86_64.zip [following]\n","--2020-06-13 15:31:41--  https://github.com/protocolbuffers/protobuf/releases/download/v3.0.0/protoc-3.0.0-linux-x86_64.zip\n","Reusing existing connection to github.com:443.\n","HTTP request sent, awaiting response... 
302 Found\n","Location: https://github-production-release-asset-2e65be.s3.amazonaws.com/23357588/c692d808-54ca-11e6-90f6-ef943b0908bf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIWNJYAX4CSVEH53A%2F20200613%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20200613T153141Z&X-Amz-Expires=300&X-Amz-Signature=f0f1806620e74d8c89cd8cbbd323a01e2b801334b243e7f17c9b3d125fee85fc&X-Amz-SignedHeaders=host&actor_id=0&repo_id=23357588&response-content-disposition=attachment%3B%20filename%3Dprotoc-3.0.0-linux-x86_64.zip&response-content-type=application%2Foctet-stream [following]\n","--2020-06-13 15:31:41--  https://github-production-release-asset-2e65be.s3.amazonaws.com/23357588/c692d808-54ca-11e6-90f6-ef943b0908bf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIWNJYAX4CSVEH53A%2F20200613%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20200613T153141Z&X-Amz-Expires=300&X-Amz-Signature=f0f1806620e74d8c89cd8cbbd323a01e2b801334b243e7f17c9b3d125fee85fc&X-Amz-SignedHeaders=host&actor_id=0&repo_id=23357588&response-content-disposition=attachment%3B%20filename%3Dprotoc-3.0.0-linux-x86_64.zip&response-content-type=application%2Foctet-stream\n","Resolving github-production-release-asset-2e65be.s3.amazonaws.com (github-production-release-asset-2e65be.s3.amazonaws.com)... 54.231.115.43\n","Connecting to github-production-release-asset-2e65be.s3.amazonaws.com (github-production-release-asset-2e65be.s3.amazonaws.com)|54.231.115.43|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 1296281 (1.2M) [application/octet-stream]\n","Saving to: ‘protobuf.zip’\n","\n","protobuf.zip        100%[===================>]   1.24M  --.-KB/s    in 0.09s   \n","\n","2020-06-13 15:31:41 (13.9 MB/s) - ‘protobuf.zip’ saved [1296281/1296281]\n","\n","Archive:  protobuf.zip\n","replace include/google/protobuf/struct.proto? [y]es, [n]o, [A]ll, [N]one, [r]ename: ㅁ\n","error:  invalid response [ㅁ]\n","replace include/google/protobuf/struct.proto? 
[y]es, [n]o, [A]ll, [N]one, [r]ename: A\n","  inflating: include/google/protobuf/struct.proto  \n","  inflating: include/google/protobuf/type.proto  \n","  inflating: include/google/protobuf/descriptor.proto  \n","  inflating: include/google/protobuf/api.proto  \n","  inflating: include/google/protobuf/empty.proto  \n","  inflating: include/google/protobuf/compiler/plugin.proto  \n","  inflating: include/google/protobuf/any.proto  \n","  inflating: include/google/protobuf/field_mask.proto  \n","  inflating: include/google/protobuf/wrappers.proto  \n","  inflating: include/google/protobuf/timestamp.proto  \n","  inflating: include/google/protobuf/duration.proto  \n","  inflating: include/google/protobuf/source_context.proto  \n","  inflating: bin/protoc              \n","  inflating: readme.txt              \n","/content/gdrive/My Drive/capstone/models/research\n","object_detection/protos/input_reader.proto: warning: Import object_detection/protos/image_resizer.proto but not used.\n","Running tests under Python 3.6.9: /usr/bin/python3\n","[ RUN      ] ModelBuilderTF1Test.test_create_experimental_model\n","[       OK ] ModelBuilderTF1Test.test_create_experimental_model\n","[ RUN      ] ModelBuilderTF1Test.test_create_faster_rcnn_from_config_with_crop_feature(True)\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_from_config_with_crop_feature(True)\n","[ RUN      ] ModelBuilderTF1Test.test_create_faster_rcnn_from_config_with_crop_feature(False)\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_from_config_with_crop_feature(False)\n","[ RUN      ] ModelBuilderTF1Test.test_create_faster_rcnn_model_from_config_with_example_miner\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_model_from_config_with_example_miner\n","[ RUN      ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_faster_rcnn_with_matmul\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_faster_rcnn_with_matmul\n","[ RUN      ] 
ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_faster_rcnn_without_matmul\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_faster_rcnn_without_matmul\n","[ RUN      ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_mask_rcnn_with_matmul\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_mask_rcnn_with_matmul\n","[ RUN      ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_mask_rcnn_without_matmul\n","[       OK ] ModelBuilderTF1Test.test_create_faster_rcnn_models_from_config_mask_rcnn_without_matmul\n","[ RUN      ] ModelBuilderTF1Test.test_create_rfcn_model_from_config\n","[       OK ] ModelBuilderTF1Test.test_create_rfcn_model_from_config\n","[ RUN      ] ModelBuilderTF1Test.test_create_ssd_fpn_model_from_config\n","[       OK ] ModelBuilderTF1Test.test_create_ssd_fpn_model_from_config\n","[ RUN      ] ModelBuilderTF1Test.test_create_ssd_models_from_config\n","[       OK ] ModelBuilderTF1Test.test_create_ssd_models_from_config\n","[ RUN      ] ModelBuilderTF1Test.test_invalid_faster_rcnn_batchnorm_update\n","[       OK ] ModelBuilderTF1Test.test_invalid_faster_rcnn_batchnorm_update\n","[ RUN      ] ModelBuilderTF1Test.test_invalid_first_stage_nms_iou_threshold\n","[       OK ] ModelBuilderTF1Test.test_invalid_first_stage_nms_iou_threshold\n","[ RUN      ] ModelBuilderTF1Test.test_invalid_model_config_proto\n","[       OK ] ModelBuilderTF1Test.test_invalid_model_config_proto\n","[ RUN      ] ModelBuilderTF1Test.test_invalid_second_stage_batch_size\n","[       OK ] ModelBuilderTF1Test.test_invalid_second_stage_batch_size\n","[ RUN      ] ModelBuilderTF1Test.test_session\n","[  SKIPPED ] ModelBuilderTF1Test.test_session\n","[ RUN      ] ModelBuilderTF1Test.test_unknown_faster_rcnn_feature_extractor\n","[       OK ] ModelBuilderTF1Test.test_unknown_faster_rcnn_feature_extractor\n","[ RUN      ] ModelBuilderTF1Test.test_unknown_meta_architecture\n","[       OK ] 
ModelBuilderTF1Test.test_unknown_meta_architecture\n","[ RUN      ] ModelBuilderTF1Test.test_unknown_ssd_feature_extractor\n","[       OK ] ModelBuilderTF1Test.test_unknown_ssd_feature_extractor\n","----------------------------------------------------------------------\n","Ran 19 tests in 0.158s\n","\n","OK (skipped=1)\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"TFgbLAgyLQSQ","colab_type":"text"},"source":["# 2단계. Preprocessing & Augmentation"]},{"cell_type":"markdown","metadata":{"id":"fkTzmv2nWqms","colab_type":"text"},"source":["1) /content/gdrive/My Drive/capstone/code/augmentation.ipynb로 데이터 부풀리기\n","\n","2) /content/gdrive/My Drive/capstone/code/txt_to_csv.ipynb로 adult_train_labels.csv랑 adult_test_labels.csv 만듦\n","\n","3) /content/gdrive/My Drive/capstone/data/annotations에 저장"]},{"cell_type":"code","metadata":{"id":"tP1_5kj-1l-f","colab_type":"code","outputId":"bbca1e17-2651-4643-8ade-de21edf9f887","executionInfo":{"status":"ok","timestamp":1591987967691,"user_tz":-540,"elapsed":353759,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":89}},"source":["# LabelImg tool로 annotation한 xml파일을 csv파일로 변환 (sign_child_labels.csv)\n","# label_map.pbtxt 파일 생성\n","\n","%cd /content/gdrive/My Drive/capstone\n","!python code/xml_to_csv.py -i data/images/train -o data/annotations/sign_child_train_labels.csv -l data/annotations\n","!python code/xml_to_csv.py -i data/images/test -o data/annotations/sign_child_test_labels.csv"],"execution_count":0,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone\n","Successfully converted xml to csv.\n","Generate `data/annotations/label_map.pbtxt`\n","Successfully converted xml to 
# Merge sign_child_train_labels.csv and adult_train_labels.csv into train_labels.csv
import csv
import os


def merge_csv_files(input_paths, output_path):
    """Concatenate CSV files into one, keeping only the first file's header.

    Args:
        input_paths: ordered list of CSV paths to merge. The header row of the
            first file is copied through; header rows of the remaining files
            are skipped so the output has exactly one header.
        output_path: destination CSV path. Opened in 'w' mode so re-running
            the cell is idempotent — the original code opened it with 'a',
            which appended duplicate rows on every re-run of the notebook.
    """
    with open(output_path, 'w', newline='') as csv_out_file:
        filewriter = csv.writer(csv_out_file)
        for index, input_file in enumerate(input_paths):
            print(os.path.basename(input_file))  # show which file is being merged
            with open(input_file, 'r', newline='') as csv_in_file:
                filereader = csv.reader(csv_in_file)
                if index > 0:
                    next(filereader)  # drop the duplicate header row
                filewriter.writerows(filereader)


if __name__ == '__main__':
    output_path = '/content/gdrive/My Drive/capstone/data/annotations/train_labels.csv'
    # CSV files to merge (sign/child labels first, then adult labels)
    file_lst = [
        '/content/gdrive/My Drive/capstone/data/annotations/sign_child_train_labels.csv',
        '/content/gdrive/My Drive/capstone/data/annotations/adult_train_labels.csv',
    ]
    merge_csv_files(file_lst, output_path)
# Merge sign_child_test_labels.csv and adult_test_labels.csv into test_labels.csv
import csv
import os


def merge_csv_files(input_paths, output_path):
    """Concatenate CSV files into one, keeping only the first file's header.

    Args:
        input_paths: ordered list of CSV paths to merge. The header row of the
            first file is copied through; header rows of the remaining files
            are skipped so the output has exactly one header.
        output_path: destination CSV path. Opened in 'w' mode so re-running
            the cell is idempotent — the original code opened it with 'a',
            which appended duplicate rows on every re-run of the notebook.
    """
    with open(output_path, 'w', newline='') as csv_out_file:
        filewriter = csv.writer(csv_out_file)
        for index, input_file in enumerate(input_paths):
            print(os.path.basename(input_file))  # show which file is being merged
            with open(input_file, 'r', newline='') as csv_in_file:
                filereader = csv.reader(csv_in_file)
                if index > 0:
                    next(filereader)  # drop the duplicate header row
                filewriter.writerows(filereader)


if __name__ == '__main__':
    output_path = '/content/gdrive/My Drive/capstone/data/annotations/test_labels.csv'
    # CSV files to merge (sign/child labels first, then adult labels)
    file_lst = [
        '/content/gdrive/My Drive/capstone/data/annotations/sign_child_test_labels.csv',
        '/content/gdrive/My Drive/capstone/data/annotations/adult_test_labels.csv',
    ]
    merge_csv_files(file_lst, output_path)
붙여진다"],"execution_count":0,"outputs":[{"output_type":"stream","text":["sign_child_test_labels.csv\n","adult_test_labels.csv\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"_eq0l6TZEkI1","colab_type":"code","colab":{}},"source":["# label_map.pbtxt에 adult를 추가한다.\n","pbtxt_content = \"\\nitem {{\\n    id: {0}\\n    name: '{1}'\\n}}\\n\\n\".format(4, 'adult')\n","with open('/content/gdrive/My Drive/capstone/data/annotations/label_map.pbtxt', \"a\") as f:\n","  f.write(pbtxt_content)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"x4_hAbYO8cnq","colab_type":"code","outputId":"0af41e76-86f7-46ac-94d5-e3cbcd530c89","executionInfo":{"status":"ok","timestamp":1591988436500,"user_tz":-540,"elapsed":382294,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":742}},"source":["# train.record, test.record 생성\n","\n","%cd /content/gdrive/My Drive/capstone\n","!python code/generate_tfrecord.py --csv_input=data/annotations/train_labels.csv --output_path=data/annotations/train.record --img_path=data/images/train --label_map data/annotations/label_map.pbtxt\n","!python code/generate_tfrecord.py --csv_input=data/annotations/test_labels.csv --output_path=data/annotations/test.record --img_path=data/images/test --label_map data/annotations/label_map.pbtxt"],"execution_count":0,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone\n","WARNING:tensorflow:From code/generate_tfrecord.py:133: The name tf.app.run is deprecated. Please use tf.compat.v1.app.run instead.\n","\n","WARNING:tensorflow:From code/generate_tfrecord.py:107: The name tf.python_io.TFRecordWriter is deprecated. Please use tf.io.TFRecordWriter instead.\n","\n","W0612 18:54:17.592498 140094160181120 module_wrapper.py:139] From code/generate_tfrecord.py:107: The name tf.python_io.TFRecordWriter is deprecated. 
Please use tf.io.TFRecordWriter instead.\n","\n","WARNING:tensorflow:From code/generate_tfrecord.py:53: The name tf.gfile.GFile is deprecated. Please use tf.io.gfile.GFile instead.\n","\n","W0612 18:54:18.062600 140094160181120 module_wrapper.py:139] From code/generate_tfrecord.py:53: The name tf.gfile.GFile is deprecated. Please use tf.io.gfile.GFile instead.\n","\n","Traceback (most recent call last):\n","  File \"code/generate_tfrecord.py\", line 133, in <module>\n","    tf.app.run()\n","  File \"/tensorflow-1.15.2/python3.6/tensorflow_core/python/platform/app.py\", line 40, in run\n","    _run(main=main, argv=argv, flags_parser=_parse_flags_tolerate_undef)\n","  File \"/usr/local/lib/python3.6/dist-packages/absl/app.py\", line 299, in run\n","    _run_main(main, args)\n","  File \"/usr/local/lib/python3.6/dist-packages/absl/app.py\", line 250, in _run_main\n","    sys.exit(main(argv))\n","  File \"code/generate_tfrecord.py\", line 124, in main\n","    tf_example = create_tf_example(group, path, label_map)\n","  File \"code/generate_tfrecord.py\", line 54, in create_tf_example\n","    encoded_jpg = fid.read()\n","  File \"/tensorflow-1.15.2/python3.6/tensorflow_core/python/lib/io/file_io.py\", line 122, in read\n","    self._preread_check()\n","  File \"/tensorflow-1.15.2/python3.6/tensorflow_core/python/lib/io/file_io.py\", line 84, in _preread_check\n","    compat.as_bytes(self.__name), 1024 * 512)\n","tensorflow.python.framework.errors_impl.NotFoundError: /content/gdrive/My Drive/capstone/data/images/train/pedestrian(150).jppedestrian(150).jpg; No such file or directory\n","WARNING:tensorflow:From code/generate_tfrecord.py:133: The name tf.app.run is deprecated. Please use tf.compat.v1.app.run instead.\n","\n","WARNING:tensorflow:From code/generate_tfrecord.py:107: The name tf.python_io.TFRecordWriter is deprecated. 
Please use tf.io.TFRecordWriter instead.\n","\n","W0612 19:00:17.424602 140559428482944 module_wrapper.py:139] From code/generate_tfrecord.py:107: The name tf.python_io.TFRecordWriter is deprecated. Please use tf.io.TFRecordWriter instead.\n","\n","WARNING:tensorflow:From code/generate_tfrecord.py:53: The name tf.gfile.GFile is deprecated. Please use tf.io.gfile.GFile instead.\n","\n","W0612 19:00:17.465687 140559428482944 module_wrapper.py:139] From code/generate_tfrecord.py:53: The name tf.gfile.GFile is deprecated. Please use tf.io.gfile.GFile instead.\n","\n","Successfully created the TFRecords: /content/gdrive/My Drive/capstone/data/annotations/test.record\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"v6plYnWv3nSk","colab_type":"code","colab":{}},"source":["test_record_fname = '/content/gdrive/My Drive/capstone/data/annotations/test.record'\n","train_record_fname = '/content/gdrive/My Drive/capstone/data/annotations/train.record'\n","label_map_pbtxt_fname = '/content/gdrive/My Drive/capstone/data/annotations/label_map.pbtxt'"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"ixIS8SkK3oHq","colab_type":"code","outputId":"5f5fd625-27a1-4ca7-e8a5-71434a5c6d06","executionInfo":{"status":"ok","timestamp":1591988472516,"user_tz":-540,"elapsed":2505,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000}},"source":["!cat /content/gdrive/My\\ 
Drive/capstone/data/annotations/test_labels.csv"],"execution_count":0,"outputs":[{"output_type":"stream","text":["filename,width,height,class,xmin,ymin,xmax,ymax\r\n","long_sign(6).jpg,620,501,long_sign,238,172,340,354\r\n","long_sign(7).jpg,620,473,long_sign,355,149,454,303\r\n","wide_sign(87).jpg,1364,550,wide_sign,404,145,876,504\r\n","wide_sign(86).jpg,1024,768,wide_sign,236,172,765,568\r\n","dark_wide_sign(60).jpg,960,1280,wide_sign,487,243,662,371\r\n","dark_wide_sign(59).jpg,450,300,wide_sign,105,76,359,230\r\n","bright_wide_sign(3).jpg,345,221,wide_sign,104,42,273,150\r\n","bright_wide_sign(4).jpg,600,450,wide_sign,108,96,444,309\r\n","bright_long_sign(3).jpg,560,420,long_sign,253,70,491,353\r\n","bright_long_sign(2).jpg,560,364,long_sign,355,8,485,236\r\n","sign34.png,1069,680,wide_sign,718,62,946,192\r\n","sign35.png,858,718,wide_sign,517,58,806,225\r\n","sign43.png,684,694,long_sign,535,73,619,238\r\n","sign45.png,1219,680,long_sign,1041,68,1166,259\r\n","IMG_2182.PNG,1242,2208,wide_sign,725,114,1090,541\r\n","IMG_2183.PNG,1242,2208,wide_sign,447,467,734,749\r\n","IMG_2231.PNG,1242,2208,long_sign,921,697,1038,984\r\n","IMG_2232.PNG,1242,2208,long_sign,677,719,860,1062\r\n","pedestrian(8).jpg,1300,956,child,343,21,656,768\r\n","pedestrian(8).jpg,1300,956,child,773,21,1053,808\r\n","pedestrian(16).jpg,1200,1800,child,437,719,725,1469\r\n","pedestrian(15).jpg,750,550,child,140,101,236,438\r\n","pedestrian(15).jpg,750,550,child,275,138,375,438\r\n","pedestrian(9).jpg,1300,977,child,486,135,736,831\r\n","pedestrian(11).jpg,852,480,child,227,40,338,427\r\n","pedestrian(11).jpg,852,480,child,369,53,482,435\r\n","pedestrian(12).jpg,1908,1146,child,484,158,814,1128\r\n","pedestrian(12).jpg,1908,1146,child,869,368,1099,1088\r\n","pedestrian(12).jpg,1908,1146,child,1139,158,1433,1146\r\n","pedestrian(10).jpg,852,480,child,186,35,292,329\r\n","pedestrian(10).jpg,852,480,child,329,122,386,329\r\n","pedestrian(10).jpg,852,480,child,412,59,510,329\r\n","pedestrian(10).j
pg,852,480,child,525,170,592,338\r\n","pedestrian(13).jpg,400,267,child,28,46,94,264\r\n","pedestrian(13).jpg,400,267,child,109,67,161,258\r\n","pedestrian(13).jpg,400,267,child,167,54,226,252\r\n","pedestrian(13).jpg,400,267,child,251,40,323,241\r\n","pedestrian(14).jpg,1200,630,child,40,37,184,630\r\n","pedestrian(14).jpg,1200,630,child,196,68,349,630\r\n","pedestrian(14).jpg,1200,630,child,412,99,571,630\r\n","pedestrian(14).jpg,1200,630,child,612,124,805,630\r\n","pedestrian(14).jpg,1200,630,child,925,49,1108,630\r\n","pedestrian(1).jpg,1600,614,child,958,20,1146,595\r\n","pedestrian(1).jpg,1600,614,child,1200,40,1408,603\r\n","dark_sign27.png,755,685,long_sign,581,50,707,238\r\n","dark_sign22.png,971,707,wide_sign,618,31,843,171\r\n","dark_sign24.png,723,690,wide_sign,387,50,624,194\r\n","dark_sign23.png,900,523,long_sign,799,47,871,156\r\n","bright_sign2.png,979,628,long_sign,819,47,957,262\r\n","bright_sign11.png,847,518,wide_sign,660,44,782,161\r\n","bright_sign24.png,723,690,wide_sign,387,50,624,194\r\n","bright_sign3.png,1042,766,long_sign,861,45,1021,291\r\n","bright_IMG_2141.PNG,1242,2208,wide_sign,647,645,786,754\r\n","bright_IMG_2143.PNG,1242,2208,wide_sign,442,719,638,858\r\n","bright_IMG_2170.PNG,1242,2208,long_sign,636,679,719,813\r\n","bright_IMG_2171.PNG,1242,2208,long_sign,712,328,903,680\r\n","dark_IMG_2146.PNG,1242,2208,wide_sign,790,741,1168,988\r\n","dark_IMG_2145.PNG,1242,2208,wide_sign,416,784,751,1028\r\n","dark_IMG_2149.PNG,1242,2208,long_sign,803,593,894,736\r\n","dark_IMG_2150.PNG,1242,2208,long_sign,881,271,1055,601\r\n","PennPed00087.jpg,325,343,adult,23,16,148,322\r\n","PennPed00088.jpg,375,320,adult,73,18,215,312\r\n","PennPed00089.jpg,428,317,adult,85,25,165,294\r\n","PennPed00089.jpg,428,317,adult,140,6,297,309\r\n","PennPed00091.jpg,372,324,adult,212,26,336,321\r\n","PennPed00090.jpg,500,370,adult,80,23,212,358\r\n","PennPed00090.jpg,500,370,adult,43,39,122,346\r\n","PennPed00090.jpg,500,370,adult,206,38,366,326\r\n","PennPed0009
2.jpg,600,447,adult,106,76,311,434\r\n","PennPed00092.jpg,600,447,adult,288,94,464,390\r\n","PennPed00093.jpg,368,311,adult,51,6,222,302\r\n","PennPed00094.jpg,422,349,adult,122,27,256,322\r\n","PennPed00095.jpg,512,375,adult,193,50,300,337\r\n","PennPed00095.jpg,512,375,adult,320,55,432,335\r\n","PennPed00096.jpg,294,331,adult,6,38,103,324\r\n","PennPed00096.jpg,294,331,adult,101,26,206,323\r\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"-SseK7ko9TZ7","colab_type":"code","outputId":"0f687302-f005-45f0-be65-a10fdc04f154","executionInfo":{"status":"ok","timestamp":1591988481557,"user_tz":-540,"elapsed":3360,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000}},"source":["!cat /content/gdrive/My\\ Drive/capstone/data/annotations/train_labels.csv"],"execution_count":0,"outputs":[{"output_type":"stream","text":["filename,width,height,class,xmin,ymin,xmax,ymax\r\n","sign15.png,910,590,long_sign,787,38,857,150\r\n","sign16.png,503,741,long_sign,128,58,271,283\r\n","sign17.png,643,787,long_sign,389,66,577,336\r\n","sign18.png,451,493,long_sign,349,99,403,212\r\n","sign19.png,703,693,long_sign,548,33,680,239\r\n","sign20.png,1136,732,long_sign,920,48,1100,362\r\n","sign21.png,1127,578,long_sign,993,24,1084,150\r\n","sign22.png,971,707,wide_sign,618,31,843,171\r\n","sign23.png,900,523,long_sign,799,47,871,156\r\n","sign24.png,723,690,wide_sign,387,50,624,194\r\n","sign25.png,871,707,wide_sign,646,42,819,149\r\n","sign26.png,1213,691,long_sign,1025,29,1182,301\r\n","sign27.png,755,685,long_sign,581,50,707,238\r\n","sign28.png,1374,750,long_sign,1058,39,1280,310\r\n","sign29.png,1282,739,wide_sign,837,69,1170,249\r\n","sign30.png,1059,730,long_sign,840,32,1018,265\r\n","sign31.png,1039,778,long_sign,743,81,978,367\r\n","sign32.png,817,691,long_sign,655,60,787,266\r\n","sign33.png,807,711,long_sign,95,51,191,215\r\n","sign36.png,999,742,wide_sign,152,55,452,
237\r\n","sign37.png,649,625,long_sign,522,54,617,217\r\n","sign38.png,868,782,wide_sign,209,41,594,268\r\n","sign39.png,1049,607,long_sign,899,65,978,235\r\n","sign40.png,813,687,long_sign,654,56,779,260\r\n","sign41.png,899,709,long_sign,712,59,821,254\r\n","sign42.png,582,745,long_sign,470,277,523,445\r\n","sign44.png,895,737,wide_sign,512,82,754,235\r\n","sign46.png,1217,717,wide_sign,902,121,1109,277\r\n","sign47.png,1163,723,long_sign,860,52,1039,336\r\n","sign48.png,989,763,long_sign,827,75,935,273\r\n","sign49.png,935,720,wide_sign,574,50,906,250\r\n","IMG_2141.PNG,1242,2208,wide_sign,647,645,786,754\r\n","IMG_2142.PNG,1242,2208,wide_sign,681,541,894,697\r\n","IMG_2143.PNG,1242,2208,wide_sign,442,719,638,858\r\n","IMG_2144.PNG,1242,2208,wide_sign,538,441,894,714\r\n","IMG_2145.PNG,1242,2208,wide_sign,416,784,751,1028\r\n","IMG_2146.PNG,1242,2208,wide_sign,790,741,1168,988\r\n","IMG_2147.PNG,1242,2208,wide_sign,807,619,1186,854\r\n","IMG_2149.PNG,1242,2208,long_sign,803,593,894,736\r\n","IMG_2150.PNG,1242,2208,long_sign,881,271,1055,601\r\n","IMG_2151.PNG,1242,2208,wide_sign,168,980,355,1106\r\n","IMG_2154.PNG,1242,2208,wide_sign,194,865,322,951\r\n","IMG_2156.PNG,1242,2208,wide_sign,829,654,1103,849\r\n","IMG_2157.PNG,1242,2208,wide_sign,542,788,781,971\r\n","IMG_2158.PNG,1242,2208,wide_sign,916,719,1212,901\r\n","IMG_2159.PNG,1242,2208,wide_sign,668,1062,899,1228\r\n","IMG_2160.PNG,1242,2208,wide_sign,551,719,707,832\r\n","IMG_2163.PNG,1242,2208,wide_sign,729,815,865,904\r\n","IMG_2164.PNG,1242,2208,wide_sign,772,655,996,811\r\n","IMG_2165.PNG,1242,2208,wide_sign,510,778,716,927\r\n","IMG_2166.PNG,1242,2208,wide_sign,932,666,1183,832\r\n","IMG_2167.PNG,1242,2208,wide_sign,648,1017,848,1145\r\n","IMG_2168.PNG,1242,2208,wide_sign,616,284,1116,754\r\n","IMG_2170.PNG,1242,2208,long_sign,636,679,719,813\r\n","IMG_2171.PNG,1242,2208,long_sign,712,328,903,680\r\n","IMG_2172.PNG,1242,2208,long_sign,594,549,755,854\r\n","IMG_2175.PNG,1242,2208,wide_sign,842,565,960,
654\r\n","IMG_2177.PNG,1242,2208,wide_sign,810,614,967,739\r\n","IMG_2178.PNG,1242,2208,wide_sign,518,715,666,826\r\n","IMG_2179.PNG,1242,2208,wide_sign,667,366,837,507\r\n","IMG_2180.PNG,1242,2208,wide_sign,877,628,1160,832\r\n","IMG_2181.PNG,1242,2208,wide_sign,456,735,686,941\r\n","IMG_2184.PNG,1242,2208,wide_sign,207,723,490,967\r\n","IMG_2185.PNG,1242,2208,wide_sign,129,62,494,488\r\n","IMG_2188.PNG,1242,2208,wide_sign,400,538,556,657\r\n","IMG_2189.PNG,1242,2208,wide_sign,229,371,481,628\r\n","IMG_2192.PNG,1242,2208,wide_sign,691,648,810,746\r\n","IMG_2193.PNG,1242,2208,wide_sign,674,532,874,685\r\n","IMG_2194.PNG,1242,2208,wide_sign,529,110,964,523\r\n","IMG_2195.PNG,1242,2208,wide_sign,842,497,1055,680\r\n","IMG_2196.PNG,1242,2208,wide_sign,247,449,455,623\r\n","IMG_2197.PNG,1242,2208,wide_sign,464,93,903,458\r\n","IMG_2198.PNG,1242,2208,wide_sign,542,597,890,858\r\n","IMG_2199.PNG,1242,2208,wide_sign,747,549,1173,888\r\n","IMG_2200.PNG,1242,2208,wide_sign,90,636,473,919\r\n","IMG_2203.PNG,1242,2208,wide_sign,529,658,681,780\r\n","IMG_2204.PNG,1242,2208,wide_sign,394,393,673,641\r\n","IMG_2205.PNG,1242,2208,wide_sign,629,493,929,732\r\n","IMG_2206.PNG,1242,2208,wide_sign,512,784,777,962\r\n","IMG_2207.PNG,1242,2208,wide_sign,138,693,421,932\r\n","IMG_2210.PNG,1242,2208,wide_sign,794,706,986,836\r\n","IMG_2211.PNG,1242,2208,wide_sign,555,849,738,988\r\n","IMG_2212.PNG,1242,2208,wide_sign,394,697,573,819\r\n","IMG_2213.PNG,1242,2208,wide_sign,729,506,1134,784\r\n","IMG_2215.PNG,1242,2208,wide_sign,483,507,620,620\r\n","IMG_2216.PNG,1242,2208,wide_sign,468,323,734,532\r\n","IMG_2217.PNG,1242,2208,wide_sign,707,519,947,701\r\n","IMG_2218.PNG,1242,2208,wide_sign,181,536,425,741\r\n","IMG_2221.PNG,1242,2208,wide_sign,474,489,641,624\r\n","IMG_2222.PNG,1242,2208,wide_sign,260,106,621,462\r\n","IMG_2223.PNG,1242,2208,wide_sign,51,401,412,671\r\n","IMG_2224.PNG,1242,2208,wide_sign,581,441,886,706\r\n","IMG_2227.PNG,1242,2208,wide_sign,400,802,549,914\r\n","IMG_2228.P
NG,1242,2208,wide_sign,194,514,503,719\r\n","IMG_2229.PNG,1242,2208,wide_sign,521,584,803,780\r\n","IMG_2235.PNG,1242,2208,wide_sign,703,801,873,923\r\n","IMG_2236.PNG,1242,2208,wide_sign,738,619,999,801\r\n","IMG_2237.PNG,1242,2208,wide_sign,734,501,1051,714\r\n","IMG_2238.PNG,1242,2208,wide_sign,403,610,699,862\r\n","IMG_2239.PNG,1242,2208,wide_sign,599,810,873,1006\r\n","IMG_2242.PNG,1242,2208,wide_sign,859,743,1013,858\r\n","IMG_2243.PNG,1242,2208,wide_sign,599,514,864,723\r\n","IMG_2244.PNG,1242,2208,wide_sign,255,554,494,762\r\n","IMG_2245.PNG,1242,2208,wide_sign,699,362,986,736\r\n","IMG_2247.PNG,1242,2208,wide_sign,210,684,398,844\r\n","IMG_2249.PNG,1242,2208,wide_sign,256,419,489,600\r\n","IMG_2250.PNG,1242,2208,wide_sign,556,614,754,773\r\n","IMG_2251.PNG,1242,2208,wide_sign,294,262,712,701\r\n","IMG_2252.PNG,1242,2208,wide_sign,247,588,573,841\r\n","IMG_2254.PNG,1242,2208,wide_sign,721,732,848,820\r\n","IMG_2255.PNG,1242,2208,wide_sign,770,560,981,724\r\n","IMG_2256.PNG,1242,2208,wide_sign,494,775,683,925\r\n","IMG_2257.PNG,1242,2208,wide_sign,255,666,460,843\r\n","IMG_2258.PNG,1242,2208,wide_sign,517,542,913,826\r\n","IMG_2259.PNG,1242,2208,wide_sign,712,341,1129,688\r\n","IMG_2262.PNG,1242,2208,wide_sign,407,454,621,614\r\n","IMG_2263.PNG,1242,2208,wide_sign,416,319,742,562\r\n","IMG_2264.PNG,1242,2208,wide_sign,212,201,638,584\r\n","IMG_2268.PNG,1242,2208,wide_sign,592,485,811,670\r\n","IMG_2269.PNG,1242,2208,wide_sign,549,429,860,654\r\n","IMG_2273.PNG,1242,2208,wide_sign,292,537,568,765\r\n","IMG_2274.PNG,1242,2208,wide_sign,164,580,621,971\r\n","IMG_2277.PNG,1242,2208,long_sign,706,881,771,997\r\n","IMG_2278.PNG,1242,2208,long_sign,777,570,925,825\r\n","IMG_2280.PNG,1242,2208,wide_sign,617,641,719,721\r\n","IMG_2280.PNG,1242,2208,long_sign,949,673,1038,810\r\n","IMG_2281.PNG,1242,2208,wide_sign,234,443,426,589\r\n","IMG_2281.PNG,1242,2208,long_sign,864,526,991,753\r\n","IMG_2282.PNG,1242,2208,long_sign,501,581,729,949\r\n","IMG_2283.PNG,1242,2208,wi
de_sign,516,632,855,867\r\n","IMG_2284.PNG,1242,2208,wide_sign,665,579,832,687\r\n","IMG_2285.PNG,1242,2208,wide_sign,573,344,808,516\r\n","IMG_2289.PNG,1242,2208,wide_sign,432,411,622,556\r\n","IMG_2290.PNG,1242,2208,wide_sign,394,511,600,668\r\n","IMG_2291.PNG,1242,2208,wide_sign,349,419,685,642\r\n","IMG_2292.PNG,1242,2208,wide_sign,92,394,492,692\r\n","IMG_2294.PNG,1242,2208,wide_sign,832,517,967,636\r\n","IMG_2295.PNG,1242,2208,wide_sign,687,342,974,575\r\n","IMG_2296.PNG,1242,2208,wide_sign,811,668,992,827\r\n","IMG_2298.PNG,1242,2208,wide_sign,311,428,487,562\r\n","IMG_2299.PNG,1242,2208,wide_sign,303,286,626,544\r\n","IMG_2302.PNG,1242,2208,wide_sign,424,513,559,646\r\n","IMG_2303.PNG,1242,2208,wide_sign,637,384,835,516\r\n","IMG_2304.PNG,1242,2208,wide_sign,720,757,863,863\r\n","IMG_2305.PNG,1242,2208,wide_sign,355,551,630,770\r\n","IMG_2308.PNG,1242,2208,wide_sign,362,729,503,843\r\n","IMG_2309.PNG,1242,2208,wide_sign,359,519,626,753\r\n","IMG_2311.PNG,1242,2208,wide_sign,336,479,517,613\r\n","IMG_2315.PNG,1242,2208,wide_sign,542,136,851,397\r\n","IMG_2318.PNG,1242,2208,wide_sign,765,215,963,383\r\n","IMG_2320.PNG,1242,2208,wide_sign,621,280,947,584\r\n","IMG_2322.PNG,1242,2208,wide_sign,498,592,663,716\r\n","IMG_2323.PNG,1242,2208,wide_sign,411,234,691,455\r\n","IMG_2324.PNG,1242,2208,wide_sign,292,186,664,498\r\n","IMG_2325.PNG,1242,2208,wide_sign,602,468,874,668\r\n","IMG_2326.PNG,1242,2208,wide_sign,596,153,902,423\r\n","IMG_2327.PNG,1242,2208,wide_sign,794,317,1108,562\r\n","IMG_2328.PNG,1242,2208,wide_sign,238,315,542,577\r\n","IMG_2329.PNG,1242,2208,wide_sign,639,596,781,717\r\n","pedestrian(79).jpg,1800,1200,child,618,261,818,1004\r\n","pedestrian(79).jpg,1800,1200,child,818,290,1028,923\r\n","pedestrian(79).jpg,1800,1200,child,1071,161,1295,1057\r\n","pedestrian(69).jpg,1600,1423,child,366,494,541,1107\r\n","pedestrian(69).jpg,1600,1423,child,641,603,850,1103\r\n","pedestrian(69).jpg,1600,1423,child,912,644,1066,1107\r\n","pedestrian(69).jpg,1600,
1423,child,1033,540,1200,1103\r\n","pedestrian(70).jpg,1200,630,child,121,159,268,559\r\n","pedestrian(70).jpg,1200,630,child,337,140,450,568\r\n","pedestrian(70).jpg,1200,630,child,471,134,593,571\r\n","pedestrian(70).jpg,1200,630,child,600,78,746,609\r\n","pedestrian(70).jpg,1200,630,child,781,121,937,578\r\n","pedestrian(73).jpg,800,534,child,57,165,171,485\r\n","pedestrian(73).jpg,800,534,child,167,7,322,499\r\n","pedestrian(73).jpg,800,534,child,302,165,420,507\r\n","pedestrian(73).jpg,800,534,child,488,183,563,499\r\n","pedestrian(73).jpg,800,534,child,571,207,653,495\r\n","pedestrian(71).jpg,775,425,child,317,94,423,412\r\n","pedestrian(71).jpg,775,425,child,449,8,590,425\r\n","pedestrian(76).jpg,1600,1155,child,475,173,687,990\r\n","pedestrian(76).jpg,1600,1155,child,716,465,883,956\r\n","pedestrian(77).jpg,1000,667,child,210,163,387,645\r\n","pedestrian(77).jpg,1000,667,child,387,204,548,630\r\n","pedestrian(77).jpg,1000,667,child,546,217,697,625\r\n","pedestrian(77).jpg,1000,667,child,682,194,812,625\r\n","pedestrian(72).jpg,920,613,child,212,121,386,594\r\n","pedestrian(72).jpg,920,613,child,419,128,591,609\r\n","pedestrian(72).jpg,920,613,child,567,63,722,506\r\n","pedestrian(78).jpg,1800,1200,child,395,319,633,1014\r\n","pedestrian(78).jpg,1800,1200,child,676,71,952,1028\r\n","pedestrian(78).jpg,1800,1200,child,980,528,1185,1081\r\n","pedestrian(75).jpg,400,300,child,83,23,170,280\r\n","pedestrian(75).jpg,400,300,child,219,14,330,294\r\n","pedestrian(74).jpg,1300,956,child,176,271,409,768\r\n","pedestrian(74).jpg,1300,956,child,383,235,556,711\r\n","pedestrian(74).jpg,1300,956,child,629,285,759,701\r\n","pedestrian(74).jpg,1300,956,child,779,255,936,695\r\n","pedestrian(74).jpg,1300,956,child,966,258,1116,675\r\n","pedestrian(68).jpg,1066,1700,child,246,477,566,1194\r\n","pedestrian(63).jpg,1200,801,child,446,200,615,713\r\n","pedestrian(63).jpg,1200,801,child,668,231,821,728\r\n","pedestrian(59).jpg,1300,953,child,179,203,396,759\r\n","pedestrian(59).j
pg,1300,953,child,489,66,773,806\r\n","pedestrian(56).jpg,579,262,child,295,81,336,239\r\n","pedestrian(56).jpg,579,262,child,356,51,418,236\r\n","pedestrian(65).jpg,1023,682,child,408,69,592,680\r\n","pedestrian(65).jpg,1023,682,child,661,80,871,659\r\n","pedestrian(64).jpg,1024,678,child,383,260,499,599\r\n","pedestrian(64).jpg,1024,678,child,562,189,701,599\r\n","pedestrian(67).jpg,1023,682,child,437,93,579,551\r\n","pedestrian(67).jpg,1023,682,child,648,83,787,556\r\n","pedestrian(66).jpg,1023,682,child,427,96,579,554\r\n","pedestrian(66).jpg,1023,682,child,632,91,795,538\r\n","pedestrian(47).jpg,1320,770,child,246,129,446,746\r\n","pedestrian(47).jpg,1320,770,child,587,291,718,753\r\n","pedestrian(47).jpg,1320,770,child,687,105,825,701\r\n","pedestrian(57).jpg,1300,957,child,393,125,559,725\r\n","pedestrian(57).jpg,1300,957,child,576,141,746,701\r\n","pedestrian(54).jpg,800,534,child,290,156,410,520\r\n","pedestrian(54).jpg,800,534,child,376,214,482,516\r\n","pedestrian(53).jpg,800,534,child,235,154,337,497\r\n","pedestrian(53).jpg,800,534,child,359,189,484,485\r\n","pedestrian(55).jpg,960,638,child,8,182,90,492\r\n","pedestrian(55).jpg,960,638,child,93,202,170,487\r\n","pedestrian(55).jpg,960,638,child,195,214,260,487\r\n","pedestrian(55).jpg,960,638,child,295,227,365,479\r\n","pedestrian(55).jpg,960,638,child,368,207,448,489\r\n","pedestrian(55).jpg,960,638,child,445,232,525,482\r\n","pedestrian(55).jpg,960,638,child,548,252,628,462\r\n","pedestrian(49).jpg,700,559,child,389,61,575,523\r\n","pedestrian(60).jpg,2737,1827,child,1761,206,2339,1591\r\n","pedestrian(46).jpg,626,417,child,56,166,136,384\r\n","pedestrian(46).jpg,626,417,child,186,179,249,389\r\n","pedestrian(46).jpg,626,417,child,280,189,354,386\r\n","pedestrian(46).jpg,626,417,child,361,142,443,405\r\n","pedestrian(44).jpg,634,407,child,256,57,325,360\r\n","pedestrian(44).jpg,634,407,child,329,47,444,403\r\n","pedestrian(44).jpg,634,407,child,461,32,577,386\r\n","pedestrian(58).jpg,1300,957,child,3
86,141,576,718\r\n","pedestrian(58).jpg,1300,957,child,593,141,749,705\r\n","pedestrian(48).jpg,860,574,child,445,95,623,544\r\n","pedestrian(45).jpg,626,417,child,62,166,157,383\r\n","pedestrian(45).jpg,626,417,child,162,179,254,389\r\n","pedestrian(45).jpg,626,417,child,270,179,352,389\r\n","pedestrian(45).jpg,626,417,child,352,144,444,399\r\n","pedestrian(50).jpg,865,1390,child,249,388,456,1141\r\n","pedestrian(50).jpg,865,1390,child,476,370,700,1159\r\n","pedestrian(43).jpg,560,373,child,1,41,116,363\r\n","pedestrian(43).jpg,560,373,child,167,127,234,347\r\n","pedestrian(43).jpg,560,373,child,254,143,301,313\r\n","pedestrian(43).jpg,560,373,child,314,137,353,293\r\n","pedestrian(43).jpg,560,373,child,390,163,423,251\r\n","pedestrian(43).jpg,560,373,child,463,103,511,288\r\n","pedestrian(39).jpg,620,349,child,131,108,266,326\r\n","pedestrian(39).jpg,620,349,child,366,99,479,320\r\n","pedestrian(52).jpg,1200,800,child,171,209,271,597\r\n","pedestrian(52).jpg,1200,800,child,278,359,378,594\r\n","pedestrian(52).jpg,1200,800,child,528,250,609,603\r\n","pedestrian(52).jpg,1200,800,child,843,216,1000,609\r\n","pedestrian(35).jpg,1300,873,child,373,419,503,766\r\n","pedestrian(35).jpg,1300,873,child,866,409,1056,759\r\n","pedestrian(41).jpg,720,432,child,134,67,249,411\r\n","pedestrian(41).jpg,720,432,child,241,69,369,413\r\n","pedestrian(36).jpg,2122,1194,child,828,191,1183,1180\r\n","pedestrian(36).jpg,2122,1194,child,1200,241,1555,1185\r\n","pedestrian(33).jpg,1300,866,child,443,386,576,796\r\n","pedestrian(33).jpg,1300,866,child,616,373,786,816\r\n","pedestrian(33).jpg,1300,866,child,903,113,1119,830\r\n","pedestrian(42).jpg,424,283,child,102,38,171,246\r\n","pedestrian(42).jpg,424,283,child,201,59,255,249\r\n","pedestrian(42).jpg,424,283,child,271,46,327,252\r\n","pedestrian(51).jpg,758,500,child,102,287,172,411\r\n","pedestrian(51).jpg,758,500,child,196,289,255,417\r\n","pedestrian(51).jpg,758,500,child,257,228,337,426\r\n","pedestrian(51).jpg,758,500,child,359,25
4,437,422\r\n","pedestrian(51).jpg,758,500,child,500,275,563,413\r\n","pedestrian(51).jpg,758,500,child,555,281,619,424\r\n","pedestrian(51).jpg,758,500,child,616,246,655,421\r\n","pedestrian(51).jpg,758,500,child,667,285,719,419\r\n","pedestrian(37).jpg,1500,1184,child,242,327,496,1023\r\n","pedestrian(37).jpg,1500,1184,child,446,296,665,965\r\n","pedestrian(34).jpg,1300,866,child,209,70,487,866\r\n","pedestrian(34).jpg,1300,866,child,529,226,786,823\r\n","pedestrian(34).jpg,1300,866,child,863,160,1156,843\r\n","pedestrian(26).jpg,1300,867,child,539,236,809,743\r\n","pedestrian(26).jpg,1300,867,child,819,226,1123,770\r\n","pedestrian(24).jpg,1500,1404,child,481,187,877,1271\r\n","pedestrian(24).jpg,1500,1404,child,892,237,1292,1240\r\n","pedestrian(20).jpg,533,800,child,156,58,373,647\r\n","pedestrian(32).jpg,1300,866,child,353,206,530,866\r\n","pedestrian(32).jpg,1300,866,child,523,186,788,866\r\n","pedestrian(32).jpg,1300,866,child,819,190,1056,863\r\n","pedestrian(22).jpg,980,653,child,475,196,520,476\r\n","pedestrian(22).jpg,980,653,child,495,199,655,481\r\n","pedestrian(27).jpg,1300,866,child,196,260,316,626\r\n","pedestrian(27).jpg,1300,866,child,343,233,499,660\r\n","pedestrian(27).jpg,1300,866,child,516,210,669,663\r\n","pedestrian(27).jpg,1300,866,child,679,256,816,670\r\n","pedestrian(27).jpg,1300,866,child,799,213,936,630\r\n","pedestrian(27).jpg,1300,866,child,993,210,1149,670\r\n","pedestrian(29).jpg,1300,866,child,343,56,589,800\r\n","pedestrian(29).jpg,1300,866,child,593,56,839,800\r\n","pedestrian(40).jpg,720,432,child,138,58,251,413\r\n","pedestrian(40).jpg,720,432,child,238,67,364,423\r\n","pedestrian(21).jpg,1510,892,child,35,106,203,678\r\n","pedestrian(21).jpg,1510,892,child,255,62,463,734\r\n","pedestrian(21).jpg,1510,892,child,527,18,747,746\r\n","pedestrian(21).jpg,1510,892,child,771,122,951,750\r\n","pedestrian(21).jpg,1510,892,child,955,26,1163,686\r\n","pedestrian(21).jpg,1510,892,child,1259,26,1459,706\r\n","pedestrian(38).jpg,1500,816,c
hild,561,96,708,631\r\n","pedestrian(38).jpg,1500,816,child,692,108,896,658\r\n","pedestrian(30).jpg,1300,866,child,393,266,566,770\r\n","pedestrian(30).jpg,1300,866,child,586,26,793,773\r\n","pedestrian(30).jpg,1300,866,child,776,293,1016,836\r\n","pedestrian(23).jpg,1200,749,child,256,93,536,749\r\n","pedestrian(23).jpg,1200,749,child,590,93,831,627\r\n","pedestrian(31).jpg,1300,866,child,7,130,169,866\r\n","pedestrian(31).jpg,1300,866,child,279,213,479,866\r\n","pedestrian(31).jpg,1300,866,child,469,263,676,816\r\n","pedestrian(31).jpg,1300,866,child,689,273,839,763\r\n","pedestrian(31).jpg,1300,866,child,816,260,943,806\r\n","pedestrian(25).jpg,1300,1300,child,233,80,573,1243\r\n","pedestrian(25).jpg,1300,1300,child,649,110,1039,1247\r\n","pedestrian(18).jpg,500,334,child,58,15,147,328\r\n","pedestrian(18).jpg,500,334,child,169,24,242,323\r\n","pedestrian(148).jpg,320,212,child,120,81,153,184\r\n","pedestrian(148).jpg,320,212,child,170,57,229,189\r\n","pedestrian(28).jpg,450,300,child,107,119,167,293\r\n","pedestrian(28).jpg,450,300,child,174,60,269,294\r\n","pedestrian(150).jppedestrian(150).jpg,1300,955,child,469,167,789,814\r\n","pedestrian(17).jpg,2592,1728,child,282,591,622,1657\r\n","pedestrian(17).jpg,2592,1728,child,722,497,1142,1664\r\n","pedestrian(17).jpg,2592,1728,child,1309,457,1682,1677\r\n","pedestrian(17).jpg,2592,1728,child,1736,337,2202,1697\r\n","pedestrian(147).jpg,615,409,child,118,59,199,380\r\n","pedestrian(147).jpg,615,409,child,215,67,304,381\r\n","pedestrian(147).jpg,615,409,child,302,73,391,375\r\n","pedestrian(147).jpg,615,409,child,404,81,510,378\r\n","pedestrian(19).jpg,800,480,child,245,23,394,466\r\n","pedestrian(19).jpg,800,480,child,410,23,582,468\r\n","pedestrian(87).jpg,766,425,child,137,77,286,363\r\n","pedestrian(87).jpg,766,425,child,249,32,427,361\r\n","pedestrian(96).jpg,626,417,child,109,58,204,323\r\n","pedestrian(96).jpg,626,417,child,202,42,291,323\r\n","pedestrian(96).jpg,626,417,child,294,57,386,324\r\n","pedestrian
(96).jpg,626,417,child,428,95,517,384\r\n","pedestrian(94).jpg,800,533,child,143,109,288,398\r\n","pedestrian(94).jpg,800,533,child,282,62,416,388\r\n","pedestrian(93).jpg,975,650,child,245,235,375,603\r\n","pedestrian(93).jpg,975,650,child,410,213,572,595\r\n","pedestrian(95).jpg,1080,450,child,3,80,159,436\r\n","pedestrian(95).jpg,1080,450,child,195,64,321,450\r\n","pedestrian(95).jpg,1080,450,child,298,11,438,450\r\n","pedestrian(95).jpg,1080,450,child,459,8,609,450\r\n","pedestrian(95).jpg,1080,450,child,631,100,729,450\r\n","pedestrian(95).jpg,1080,450,child,773,105,903,449\r\n","pedestrian(95).jpg,1080,450,child,973,122,1075,450\r\n","pedestrian(146).jpg,2560,1707,child,906,553,1186,1433\r\n","pedestrian(146).jpg,2560,1707,child,1273,453,1566,1466\r\n","pedestrian(146).jpg,2560,1707,child,1560,500,1873,1526\r\n","pedestrian(92).jpg,1086,723,child,465,269,604,672\r\n","pedestrian(92).jpg,1086,723,child,604,327,715,661\r\n","pedestrian(92).jpg,1086,723,child,729,300,859,652\r\n","pedestrian(97).jpg,259,195,child,51,15,108,191\r\n","pedestrian(97).jpg,259,195,child,135,38,179,189\r\n","pedestrian(89).jpg,500,333,child,61,73,120,262\r\n","pedestrian(89).jpg,500,333,child,145,86,202,266\r\n","pedestrian(89).jpg,500,333,child,211,77,271,268\r\n","pedestrian(89).jpg,500,333,child,277,59,340,271\r\n","pedestrian(89).jpg,500,333,child,353,70,412,270\r\n","pedestrian(99).jpg,979,652,child,87,161,222,546\r\n","pedestrian(99).jpg,979,652,child,244,116,354,551\r\n","pedestrian(99).jpg,979,652,child,412,159,552,554\r\n","pedestrian(99).jpg,979,652,child,589,149,714,566\r\n","pedestrian(91).jpg,700,434,child,145,70,263,367\r\n","pedestrian(91).jpg,700,434,child,221,56,320,408\r\n","pedestrian(91).jpg,700,434,child,304,69,405,393\r\n","pedestrian(91).jpg,700,434,child,396,54,482,406\r\n","pedestrian(91).jpg,700,434,child,536,10,695,354\r\n","pedestrian(90).jpg,400,599,child,45,98,176,537\r\n","pedestrian(90).jpg,400,599,child,186,92,332,539\r\n","pedestrian(98).jpg,259,195,ch
ild,49,15,107,195\r\n","pedestrian(98).jpg,259,195,child,135,36,188,191\r\n","pedestrian(88).jpg,1400,875,child,304,152,464,698\r\n","pedestrian(88).jpg,1400,875,child,482,295,614,748\r\n","pedestrian(88).jpg,1400,875,child,604,248,732,741\r\n","pedestrian(88).jpg,1400,875,child,725,212,875,766\r\n","pedestrian(88).jpg,1400,875,child,861,255,1050,798\r\n","pedestrian(82).jpg,1100,1540,child,347,549,613,1526\r\n","pedestrian(82).jpg,1100,1540,child,550,509,891,1540\r\n","pedestrian(62).jpg,921,1200,child,231,299,469,1054\r\n","pedestrian(62).jpg,921,1200,child,557,356,774,1014\r\n","pedestrian(83).jpg,1000,650,child,169,66,325,602\r\n","pedestrian(83).jpg,1000,650,child,300,178,415,545\r\n","pedestrian(83).jpg,1000,650,child,428,166,561,537\r\n","pedestrian(83).jpg,1000,650,child,548,143,676,566\r\n","pedestrian(83).jpg,1000,650,child,679,132,828,568\r\n","pedestrian(61).jpg,1200,838,child,409,357,546,650\r\n","pedestrian(61).jpg,1200,838,child,562,247,718,663\r\n","pedestrian(81).jpg,275,183,child,131,65,154,147\r\n","pedestrian(81).jpg,275,183,child,166,68,198,153\r\n","pedestrian(84).jpg,849,565,child,320,101,446,486\r\n","pedestrian(80).jpg,275,183,child,136,29,183,183\r\n","pedestrian(85).jpg,612,408,child,147,113,258,390\r\n","pedestrian(85).jpg,612,408,child,316,76,412,388\r\n","pedestrian(86).jpg,509,339,child,44,140,122,308\r\n","pedestrian(86).jpg,509,339,child,146,145,207,314\r\n","pedestrian(86).jpg,509,339,child,233,148,291,318\r\n","pedestrian(86).jpg,509,339,child,287,118,361,317\r\n","pedestrian(102).jpg,2716,1810,child,737,526,1158,1755\r\n","pedestrian(102).jpg,2716,1810,child,1208,426,1679,1762\r\n","pedestrian(103).jpg,626,417,child,54,37,162,368\r\n","pedestrian(103).jpg,626,417,child,169,36,307,404\r\n","pedestrian(103).jpg,626,417,child,307,20,456,415\r\n","pedestrian(103).jpg,626,417,child,496,16,617,402\r\n","pedestrian(104).jpg,640,427,child,168,157,231,353\r\n","pedestrian(104).jpg,640,427,child,350,113,437,340\r\n","pedestrian(100).jpg,420
,276,child,5,20,68,262\r\n","pedestrian(100).jpg,420,276,child,64,33,130,256\r\n","pedestrian(100).jpg,420,276,child,119,43,183,264\r\n","pedestrian(100).jpg,420,276,child,188,31,310,268\r\n","pedestrian(100).jpg,420,276,child,336,34,420,257\r\n","pedestrian(105).jpg,866,1390,child,213,172,661,1232\r\n","pedestrian(108).jpg,2121,1414,child,699,845,938,1323\r\n","pedestrian(108).jpg,2121,1414,child,932,317,1282,1284\r\n","pedestrian(101).jpg,400,267,child,55,29,104,218\r\n","pedestrian(101).jpg,400,267,child,114,53,162,223\r\n","pedestrian(101).jpg,400,267,child,178,55,228,231\r\n","pedestrian(101).jpg,400,267,child,229,39,297,237\r\n","pedestrian(107).jpg,1280,720,child,566,117,746,683\r\n","pedestrian(107).jpg,1280,720,child,263,90,499,687\r\n","pedestrian(106).jpg,1024,684,child,285,102,559,657\r\n","pedestrian(106).jpg,1024,684,child,538,202,775,642\r\n","pedestrian(111).jpg,640,358,child,254,211,314,352\r\n","pedestrian(111).jpg,640,358,child,316,136,395,342\r\n","pedestrian(113).jpg,300,450,child,49,197,148,430\r\n","pedestrian(113).jpg,300,450,child,191,226,257,434\r\n","pedestrian(109).jpg,1300,956,child,273,305,423,705\r\n","pedestrian(109).jpg,1300,956,child,426,138,603,698\r\n","pedestrian(109).jpg,1300,956,child,589,371,743,681\r\n","pedestrian(110).jpg,720,480,child,384,147,527,434\r\n","pedestrian(110).jpg,720,480,child,506,213,619,443\r\n","pedestrian(116).jpg,1300,956,child,343,321,476,805\r\n","pedestrian(116).jpg,1300,956,child,449,345,599,805\r\n","pedestrian(116).jpg,1300,956,child,579,381,689,735\r\n","pedestrian(116).jpg,1300,956,child,676,308,846,765\r\n","pedestrian(117).jpg,1300,956,child,323,335,476,811\r\n","pedestrian(117).jpg,1300,956,child,443,348,579,801\r\n","pedestrian(117).jpg,1300,956,child,569,375,693,741\r\n","pedestrian(117).jpg,1300,956,child,689,318,829,728\r\n","pedestrian(112).jpg,650,433,child,166,56,291,413\r\n","pedestrian(112).jpg,650,433,child,283,113,388,391\r\n","pedestrian(115).jpg,948,632,child,200,142,346,601\r\n","
pedestrian(115).jpg,948,632,child,363,123,529,572\r\n","pedestrian(114).jpg,650,433,child,105,148,226,353\r\n","pedestrian(114).jpg,650,433,child,228,121,420,424\r\n","pedestrian(114).jpg,650,433,child,435,129,585,346\r\n","pedestrian(118).jpg,1280,1920,child,466,630,806,1670\r\n","pedestrian(122).jpg,653,490,child,91,43,224,437\r\n","pedestrian(122).jpg,653,490,child,232,48,352,432\r\n","pedestrian(122).jpg,653,490,child,382,103,494,445\r\n","pedestrian(119).jpg,1300,956,child,306,335,443,798\r\n","pedestrian(119).jpg,1300,956,child,453,358,593,825\r\n","pedestrian(119).jpg,1300,956,child,593,405,709,775\r\n","pedestrian(119).jpg,1300,956,child,763,338,926,801\r\n","pedestrian(120).jpg,620,387,child,185,85,279,362\r\n","pedestrian(120).jpg,620,387,child,295,35,411,372\r\n","pedestrian(123).jpg,1300,1390,child,66,25,473,1098\r\n","pedestrian(123).jpg,1300,1390,child,626,232,996,1138\r\n","pedestrian(126).jpg,1300,956,child,46,71,316,878\r\n","pedestrian(126).jpg,1300,956,child,439,61,679,895\r\n","pedestrian(126).jpg,1300,956,child,726,131,1033,871\r\n","pedestrian(121).jpg,3700,2470,child,1100,575,1750,2085\r\n","pedestrian(124).jpg,800,533,child,194,102,300,431\r\n","pedestrian(124).jpg,800,533,child,296,113,437,433\r\n","pedestrian(125).jpg,315,315,child,39,22,143,311\r\n","pedestrian(125).jpg,315,315,child,161,29,276,310\r\n","pedestrian(127).jpg,1300,953,child,423,166,586,676\r\n","pedestrian(127).jpg,1300,953,child,589,213,746,699\r\n","pedestrian(127).jpg,1300,953,child,759,236,906,679\r\n","pedestrian(129).jpg,866,1390,child,81,365,366,1161\r\n","pedestrian(129).jpg,866,1390,child,404,292,710,1179\r\n","pedestrian(130).jpg,1000,750,child,474,195,625,654\r\n","pedestrian(130).jpg,1000,750,child,661,198,818,649\r\n","pedestrian(131).jpg,1300,956,child,493,151,696,731\r\n","pedestrian(131).jpg,1300,956,child,803,131,1023,748\r\n","pedestrian(133).jpg,1300,956,child,256,211,463,798\r\n","pedestrian(133).jpg,1300,956,child,499,165,733,791\r\n","pedestrian(128).jp
g,1000,667,child,212,240,356,661\r\n","pedestrian(128).jpg,1000,667,child,382,276,489,663\r\n","pedestrian(128).jpg,1000,667,child,494,266,646,663\r\n","pedestrian(128).jpg,1000,667,child,664,269,771,666\r\n","pedestrian(136).jpg,1536,768,child,416,24,632,756\r\n","pedestrian(136).jpg,1536,768,child,668,20,896,744\r\n","pedestrian(136).jpg,1536,768,child,900,12,1142,768\r\n","pedestrian(141).jpg,1280,961,child,573,463,769,843\r\n","pedestrian(135).jpg,288,512,child,53,76,179,376\r\n","pedestrian(135).jpg,288,512,child,192,5,228,139\r\n","pedestrian(134).jpg,512,342,child,151,102,231,316\r\n","pedestrian(134).jpg,512,342,child,265,112,362,310\r\n","pedestrian(132).jpg,1300,954,child,553,180,776,764\r\n","pedestrian(132).jpg,1300,954,child,849,150,1089,804\r\n","pedestrian(143).jpg,727,545,child,102,156,227,470\r\n","pedestrian(143).jpg,727,545,child,274,170,410,462\r\n","pedestrian(143).jpg,727,545,child,412,194,535,453\r\n","pedestrian(138).jpg,1300,954,child,406,164,639,767\r\n","pedestrian(140).jpg,1500,1200,child,484,127,754,1096\r\n","pedestrian(140).jpg,1500,1200,child,738,150,984,1077\r\n","pedestrian(137).jpg,1200,800,child,225,316,371,609\r\n","pedestrian(137).jpg,1200,800,child,403,253,531,597\r\n","pedestrian(137).jpg,1200,800,child,462,322,603,647\r\n","pedestrian(137).jpg,1200,800,child,575,203,678,528\r\n","pedestrian(137).jpg,1200,800,child,693,234,943,800\r\n","pedestrian(139).jpg,702,521,child,145,142,256,517\r\n","pedestrian(139).jpg,702,521,child,252,133,358,509\r\n","pedestrian(139).jpg,702,521,child,403,187,501,517\r\n","pedestrian(142).jpg,1880,1255,child,455,577,680,1097\r\n","pedestrian(142).jpg,1880,1255,child,680,262,980,1132\r\n","pedestrian(142).jpg,1880,1255,child,1000,427,1275,1112\r\n","pedestrian(142).jpg,1880,1255,child,1320,687,1525,1167\r\n","pedestrian(144).jpg,1880,1255,child,450,577,695,1122\r\n","pedestrian(144).jpg,1880,1255,child,675,257,970,1132\r\n","pedestrian(144).jpg,1880,1255,child,1015,397,1260,1137\r\n","pedestrian(144
).jpg,1880,1255,child,1315,702,1505,1152\r\n","pedestrian(145).jpg,1200,900,child,184,134,371,838\r\n","pedestrian(145).jpg,1200,900,child,362,144,584,834\r\n","pedestrian(145).jpg,1200,900,child,587,156,812,853\r\n","pedestrian(145).jpg,1200,900,child,809,184,1037,869\r\n","dark_sign18.png,451,493,long_sign,349,99,403,212\r\n","dark_sign31.png,1039,778,long_sign,743,81,978,367\r\n","dark_sign1.png,929,582,wide_sign,695,43,871,155\r\n","dark_sign43.png,684,694,long_sign,535,73,619,238\r\n","dark_sign37.png,649,625,long_sign,522,54,617,217\r\n","dark_sign45.png,1219,680,long_sign,1041,68,1166,259\r\n","dark_sign29.png,1282,739,wide_sign,837,69,1170,249\r\n","dark_sign9.png,423,565,long_sign,320,105,391,252\r\n","dark_sign21.png,1127,578,long_sign,993,24,1084,150\r\n","dark_sign8.png,783,698,long_sign,607,133,709,297\r\n","dark_sign25.png,871,707,wide_sign,646,42,819,149\r\n","dark_sign35.png,858,718,wide_sign,517,58,806,225\r\n","dark_sign12.png,718,650,wide_sign,405,65,602,182\r\n","dark_sign40.png,813,687,long_sign,654,56,779,260\r\n","dark_sign36.png,999,742,wide_sign,152,55,452,237\r\n","dark_sign34.png,1069,680,wide_sign,718,62,946,192\r\n","dark_sign44.png,895,737,wide_sign,512,82,754,235\r\n","dark_sign49.png,935,720,wide_sign,574,50,906,250\r\n","dark_sign16.png,503,741,long_sign,128,58,271,283\r\n","dark_sign47.png,1163,723,long_sign,860,52,1039,336\r\n","dark_sign13.png,826,603,wide_sign,78,71,208,169\r\n","dark_sign48.png,989,763,long_sign,827,75,935,273\r\n","dark_sign46.png,1217,717,wide_sign,902,121,1109,277\r\n","dark_sign19.png,703,693,long_sign,548,33,680,239\r\n","dark_sign42.png,582,745,long_sign,470,277,523,445\r\n","dark_sign38.png,868,782,wide_sign,209,41,594,268\r\n","dark_sign3.png,1042,766,long_sign,861,45,1021,291\r\n","dark_sign2.png,979,628,long_sign,819,47,957,262\r\n","dark_sign5.png,704,571,long_sign,600,34,682,223\r\n","dark_sign32.png,817,691,long_sign,655,60,787,266\r\n","dark_sign20.png,1136,732,long_sign,920,48,1100,362\r\n","dark_
sign7.png,523,713,long_sign,357,61,468,244\r\n","dark_sign14.png,492,695,long_sign,326,81,435,288\r\n","dark_sign28.png,1374,750,long_sign,1058,39,1280,310\r\n","dark_sign15.png,910,590,long_sign,787,38,857,150\r\n","dark_sign11.png,847,518,wide_sign,660,44,782,161\r\n","dark_sign41.png,899,709,long_sign,712,59,821,254\r\n","dark_sign39.png,1049,607,long_sign,899,65,978,235\r\n","bright_sign45.png,1219,680,long_sign,1041,68,1166,259\r\n","bright_sign9.png,423,565,long_sign,320,105,391,252\r\n","bright_sign31.png,1039,778,long_sign,743,81,978,367\r\n","bright_sign40.png,813,687,long_sign,654,56,779,260\r\n","bright_sign43.png,684,694,long_sign,535,73,619,238\r\n","bright_sign27.png,755,685,long_sign,581,50,707,238\r\n","bright_sign36.png,999,742,wide_sign,152,55,452,237\r\n","bright_sign34.png,1069,680,wide_sign,718,62,946,192\r\n","bright_sign29.png,1282,739,wide_sign,837,69,1170,249\r\n","bright_sign33.png,807,711,long_sign,95,51,191,215\r\n","bright_sign44.png,895,737,wide_sign,512,82,754,235\r\n","bright_sign35.png,858,718,wide_sign,517,58,806,225\r\n","bright_sign30.png,1059,730,long_sign,840,32,1018,265\r\n","bright_sign48.png,989,763,long_sign,827,75,935,273\r\n","bright_sign47.png,1163,723,long_sign,860,52,1039,336\r\n","bright_sign6.png,652,700,long_sign,131,63,276,315\r\n","bright_sign38.png,868,782,wide_sign,209,41,594,268\r\n","bright_sign42.png,582,745,long_sign,470,277,523,445\r\n","bright_sign7.png,523,713,long_sign,357,61,468,244\r\n","bright_sign17.png,643,787,long_sign,389,66,577,336\r\n","bright_sign20.png,1136,732,long_sign,920,48,1100,362\r\n","bright_sign46.png,1217,717,wide_sign,902,121,1109,277\r\n","bright_sign16.png,503,741,long_sign,128,58,271,283\r\n","bright_sign5.png,704,571,long_sign,600,34,682,223\r\n","bright_sign4.png,721,677,long_sign,566,58,691,244\r\n","bright_sign14.png,492,695,long_sign,326,81,435,288\r\n","bright_sign41.png,899,709,long_sign,712,59,821,254\r\n","bright_sign32.png,817,691,long_sign,655,60,787,266\r\n","bright_si
gn18.png,451,493,long_sign,349,99,403,212\r\n","bright_sign10.png,749,644,long_sign,584,48,728,237\r\n","bright_sign23.png,900,523,long_sign,799,47,871,156\r\n","bright_sign19.png,703,693,long_sign,548,33,680,239\r\n","bright_sign8.png,783,698,long_sign,607,133,709,297\r\n","bright_sign49.png,935,720,wide_sign,574,50,906,250\r\n","bright_sign39.png,1049,607,long_sign,899,65,978,235\r\n","bright_sign28.png,1374,750,long_sign,1058,39,1280,310\r\n","bright_IMG_2146.PNG,1242,2208,wide_sign,790,741,1168,988\r\n","bright_IMG_2144.PNG,1242,2208,wide_sign,538,441,894,714\r\n","bright_IMG_2145.PNG,1242,2208,wide_sign,416,784,751,1028\r\n","bright_IMG_2154.PNG,1242,2208,wide_sign,194,865,322,951\r\n","bright_IMG_2157.PNG,1242,2208,wide_sign,542,788,781,971\r\n","bright_IMG_2158.PNG,1242,2208,wide_sign,916,719,1212,901\r\n","bright_IMG_2156.PNG,1242,2208,wide_sign,829,654,1103,849\r\n","bright_IMG_2147.PNG,1242,2208,wide_sign,807,619,1186,854\r\n","bright_IMG_2159.PNG,1242,2208,wide_sign,668,1062,899,1228\r\n","bright_IMG_2166.PNG,1242,2208,wide_sign,932,666,1183,832\r\n","bright_IMG_2164.PNG,1242,2208,wide_sign,772,655,996,811\r\n","bright_IMG_2168.PNG,1242,2208,wide_sign,616,284,1116,754\r\n","bright_IMG_2160.PNG,1242,2208,wide_sign,551,719,707,832\r\n","bright_IMG_2167.PNG,1242,2208,wide_sign,648,1017,848,1145\r\n","bright_IMG_2163.PNG,1242,2208,wide_sign,729,815,865,904\r\n","bright_IMG_2165.PNG,1242,2208,wide_sign,510,778,716,927\r\n","bright_IMG_2181.PNG,1242,2208,wide_sign,456,735,686,941\r\n","bright_IMG_2182.PNG,1242,2208,wide_sign,725,114,1090,541\r\n","bright_IMG_2177.PNG,1242,2208,wide_sign,810,614,967,739\r\n","bright_IMG_2178.PNG,1242,2208,wide_sign,518,715,666,826\r\n","bright_IMG_2179.PNG,1242,2208,wide_sign,667,366,837,507\r\n","bright_IMG_2180.PNG,1242,2208,wide_sign,877,628,1160,832\r\n","bright_IMG_2188.PNG,1242,2208,wide_sign,400,538,556,657\r\n","bright_IMG_2189.PNG,1242,2208,wide_sign,229,371,481,628\r\n","bright_IMG_2185.PNG,1242,2208,wide_sign,129,62,4
94,488\r\n","bright_IMG_2183.PNG,1242,2208,wide_sign,447,467,734,749\r\n","bright_IMG_2194.PNG,1242,2208,wide_sign,529,110,964,523\r\n","bright_IMG_2184.PNG,1242,2208,wide_sign,207,723,490,967\r\n","bright_IMG_2196.PNG,1242,2208,wide_sign,247,449,455,623\r\n","bright_IMG_2199.PNG,1242,2208,wide_sign,747,549,1173,888\r\n","bright_IMG_2203.PNG,1242,2208,wide_sign,529,658,681,780\r\n","bright_IMG_2197.PNG,1242,2208,wide_sign,464,93,903,458\r\n","bright_IMG_2204.PNG,1242,2208,wide_sign,394,393,673,641\r\n","bright_IMG_2200.PNG,1242,2208,wide_sign,90,636,473,919\r\n","bright_IMG_2198.PNG,1242,2208,wide_sign,542,597,890,858\r\n","bright_IMG_2207.PNG,1242,2208,wide_sign,138,693,421,932\r\n","bright_IMG_2210.PNG,1242,2208,wide_sign,794,706,986,836\r\n","bright_IMG_2206.PNG,1242,2208,wide_sign,512,784,777,962\r\n","bright_IMG_2212.PNG,1242,2208,wide_sign,394,697,573,819\r\n","bright_IMG_2227.PNG,1242,2208,wide_sign,400,802,549,914\r\n","bright_IMG_2218.PNG,1242,2208,wide_sign,181,536,425,741\r\n","bright_IMG_2222.PNG,1242,2208,wide_sign,260,106,621,462\r\n","bright_IMG_2239.PNG,1242,2208,wide_sign,599,810,873,1006\r\n","bright_IMG_2217.PNG,1242,2208,wide_sign,707,519,947,701\r\n","bright_IMG_2223.PNG,1242,2208,wide_sign,51,401,412,671\r\n","bright_IMG_2221.PNG,1242,2208,wide_sign,474,489,641,624\r\n","bright_IMG_2224.PNG,1242,2208,wide_sign,581,441,886,706\r\n","bright_IMG_2238.PNG,1242,2208,wide_sign,403,610,699,862\r\n","bright_IMG_2237.PNG,1242,2208,wide_sign,734,501,1051,714\r\n","bright_IMG_2231.PNG,1242,2208,long_sign,921,697,1038,984\r\n","bright_IMG_2235.PNG,1242,2208,wide_sign,703,801,873,923\r\n","bright_IMG_2232.PNG,1242,2208,long_sign,677,719,860,1062\r\n","bright_IMG_2262.PNG,1242,2208,wide_sign,407,454,621,614\r\n","bright_IMG_2263.PNG,1242,2208,wide_sign,416,319,742,562\r\n","bright_IMG_2245.PNG,1242,2208,wide_sign,699,362,986,736\r\n","bright_IMG_2244.PNG,1242,2208,wide_sign,255,554,494,762\r\n","bright_IMG_2242.PNG,1242,2208,wide_sign,859,743,1013,858\r\n","
bright_IMG_2247.PNG,1242,2208,wide_sign,210,684,398,844\r\n","bright_IMG_2243.PNG,1242,2208,wide_sign,599,514,864,723\r\n","bright_IMG_2259.PNG,1242,2208,wide_sign,712,341,1129,688\r\n","bright_IMG_2255.PNG,1242,2208,wide_sign,770,560,981,724\r\n","bright_IMG_2258.PNG,1242,2208,wide_sign,517,542,913,826\r\n","bright_IMG_2277.PNG,1242,2208,long_sign,706,881,771,997\r\n","bright_IMG_2257.PNG,1242,2208,wide_sign,255,666,460,843\r\n","bright_IMG_2256.PNG,1242,2208,wide_sign,494,775,683,925\r\n","bright_IMG_2254.PNG,1242,2208,wide_sign,721,732,848,820\r\n","bright_IMG_2274.PNG,1242,2208,wide_sign,164,580,621,971\r\n","bright_IMG_2282.PNG,1242,2208,long_sign,501,581,729,949\r\n","bright_IMG_2264.PNG,1242,2208,wide_sign,212,201,638,584\r\n","bright_IMG_2285.PNG,1242,2208,wide_sign,573,344,808,516\r\n","bright_IMG_2269.PNG,1242,2208,wide_sign,549,429,860,654\r\n","bright_IMG_2273.PNG,1242,2208,wide_sign,292,537,568,765\r\n","bright_IMG_2298.PNG,1242,2208,wide_sign,311,428,487,562\r\n","bright_IMG_2299.PNG,1242,2208,wide_sign,303,286,626,544\r\n","bright_IMG_2280.PNG,1242,2208,wide_sign,617,641,719,721\r\n","bright_IMG_2280.PNG,1242,2208,long_sign,949,673,1038,810\r\n","bright_IMG_2278.PNG,1242,2208,long_sign,777,570,925,825\r\n","bright_IMG_2281.PNG,1242,2208,wide_sign,234,443,426,589\r\n","bright_IMG_2281.PNG,1242,2208,long_sign,864,526,991,753\r\n","bright_IMG_2292.PNG,1242,2208,wide_sign,92,394,492,692\r\n","bright_IMG_2290.PNG,1242,2208,wide_sign,394,511,600,668\r\n","bright_IMG_2305.PNG,1242,2208,wide_sign,355,551,630,770\r\n","bright_IMG_2296.PNG,1242,2208,wide_sign,811,668,992,827\r\n","bright_IMG_2291.PNG,1242,2208,wide_sign,349,419,685,642\r\n","bright_IMG_2308.PNG,1242,2208,wide_sign,362,729,503,843\r\n","bright_IMG_2295.PNG,1242,2208,wide_sign,687,342,974,575\r\n","bright_IMG_2294.PNG,1242,2208,wide_sign,832,517,967,636\r\n","bright_IMG_2323.PNG,1242,2208,wide_sign,411,234,691,455\r\n","bright_IMG_2324.PNG,1242,2208,wide_sign,292,186,664,498\r\n","bright_IMG_2315
.PNG,1242,2208,wide_sign,542,136,851,397\r\n","bright_IMG_2320.PNG,1242,2208,wide_sign,621,280,947,584\r\n","bright_IMG_2303.PNG,1242,2208,wide_sign,637,384,835,516\r\n","bright_IMG_2304.PNG,1242,2208,wide_sign,720,757,863,863\r\n","bright_IMG_2322.PNG,1242,2208,wide_sign,498,592,663,716\r\n","bright_IMG_2327.PNG,1242,2208,wide_sign,794,317,1108,562\r\n","bright_IMG_2309.PNG,1242,2208,wide_sign,359,519,626,753\r\n","bright_IMG_2326.PNG,1242,2208,wide_sign,596,153,902,423\r\n","bright_IMG_2328.PNG,1242,2208,wide_sign,238,315,542,577\r\n","bright_IMG_2325.PNG,1242,2208,wide_sign,602,468,874,668\r\n","dark_IMG_2158.PNG,1242,2208,wide_sign,916,719,1212,901\r\n","dark_IMG_2142.PNG,1242,2208,wide_sign,681,541,894,697\r\n","dark_IMG_2143.PNG,1242,2208,wide_sign,442,719,638,858\r\n","dark_IMG_2144.PNG,1242,2208,wide_sign,538,441,894,714\r\n","dark_IMG_2141.PNG,1242,2208,wide_sign,647,645,786,754\r\n","dark_IMG_2157.PNG,1242,2208,wide_sign,542,788,781,971\r\n","dark_IMG_2154.PNG,1242,2208,wide_sign,194,865,322,951\r\n","dark_IMG_2170.PNG,1242,2208,long_sign,636,679,719,813\r\n","dark_IMG_2147.PNG,1242,2208,wide_sign,807,619,1186,854\r\n","dark_IMG_2168.PNG,1242,2208,wide_sign,616,284,1116,754\r\n","dark_IMG_2151.PNG,1242,2208,wide_sign,168,980,355,1106\r\n","dark_IMG_2156.PNG,1242,2208,wide_sign,829,654,1103,849\r\n","dark_IMG_2166.PNG,1242,2208,wide_sign,932,666,1183,832\r\n","dark_IMG_2163.PNG,1242,2208,wide_sign,729,815,865,904\r\n","dark_IMG_2160.PNG,1242,2208,wide_sign,551,719,707,832\r\n","dark_IMG_2165.PNG,1242,2208,wide_sign,510,778,716,927\r\n","dark_IMG_2182.PNG,1242,2208,wide_sign,725,114,1090,541\r\n","dark_IMG_2159.PNG,1242,2208,wide_sign,668,1062,899,1228\r\n","dark_IMG_2164.PNG,1242,2208,wide_sign,772,655,996,811\r\n","dark_IMG_2177.PNG,1242,2208,wide_sign,810,614,967,739\r\n","dark_IMG_2175.PNG,1242,2208,wide_sign,842,565,960,654\r\n","dark_IMG_2171.PNG,1242,2208,long_sign,712,328,903,680\r\n","dark_IMG_2181.PNG,1242,2208,wide_sign,456,735,686,941\r\n","dark_
IMG_2179.PNG,1242,2208,wide_sign,667,366,837,507\r\n","dark_IMG_2194.PNG,1242,2208,wide_sign,529,110,964,523\r\n","dark_IMG_2178.PNG,1242,2208,wide_sign,518,715,666,826\r\n","dark_IMG_2172.PNG,1242,2208,long_sign,594,549,755,854\r\n","dark_IMG_2189.PNG,1242,2208,wide_sign,229,371,481,628\r\n","dark_IMG_2204.PNG,1242,2208,wide_sign,394,393,673,641\r\n","dark_IMG_2183.PNG,1242,2208,wide_sign,447,467,734,749\r\n","dark_IMG_2193.PNG,1242,2208,wide_sign,674,532,874,685\r\n","dark_IMG_2188.PNG,1242,2208,wide_sign,400,538,556,657\r\n","dark_IMG_2184.PNG,1242,2208,wide_sign,207,723,490,967\r\n","dark_IMG_2185.PNG,1242,2208,wide_sign,129,62,494,488\r\n","dark_IMG_2192.PNG,1242,2208,wide_sign,691,648,810,746\r\n","dark_IMG_2198.PNG,1242,2208,wide_sign,542,597,890,858\r\n","dark_IMG_2197.PNG,1242,2208,wide_sign,464,93,903,458\r\n","dark_IMG_2199.PNG,1242,2208,wide_sign,747,549,1173,888\r\n","dark_IMG_2215.PNG,1242,2208,wide_sign,483,507,620,620\r\n","dark_IMG_2200.PNG,1242,2208,wide_sign,90,636,473,919\r\n","dark_IMG_2195.PNG,1242,2208,wide_sign,842,497,1055,680\r\n","dark_IMG_2203.PNG,1242,2208,wide_sign,529,658,681,780\r\n","dark_IMG_2196.PNG,1242,2208,wide_sign,247,449,455,623\r\n","dark_IMG_2227.PNG,1242,2208,wide_sign,400,802,549,914\r\n","dark_IMG_2211.PNG,1242,2208,wide_sign,555,849,738,988\r\n","dark_IMG_2212.PNG,1242,2208,wide_sign,394,697,573,819\r\n","dark_IMG_2210.PNG,1242,2208,wide_sign,794,706,986,836\r\n","dark_IMG_2205.PNG,1242,2208,wide_sign,629,493,929,732\r\n","dark_IMG_2206.PNG,1242,2208,wide_sign,512,784,777,962\r\n","dark_IMG_2213.PNG,1242,2208,wide_sign,729,506,1134,784\r\n","dark_IMG_2207.PNG,1242,2208,wide_sign,138,693,421,932\r\n","dark_IMG_2217.PNG,1242,2208,wide_sign,707,519,947,701\r\n","dark_IMG_2218.PNG,1242,2208,wide_sign,181,536,425,741\r\n","dark_IMG_2239.PNG,1242,2208,wide_sign,599,810,873,1006\r\n","dark_IMG_2238.PNG,1242,2208,wide_sign,403,610,699,862\r\n","dark_IMG_2224.PNG,1242,2208,wide_sign,581,441,886,706\r\n","dark_IMG_2216.PNG,1242,2
208,wide_sign,468,323,734,532\r\n","dark_IMG_2222.PNG,1242,2208,wide_sign,260,106,621,462\r\n","dark_IMG_2223.PNG,1242,2208,wide_sign,51,401,412,671\r\n","dark_IMG_2221.PNG,1242,2208,wide_sign,474,489,641,624\r\n","dark_IMG_2232.PNG,1242,2208,long_sign,677,719,860,1062\r\n","dark_IMG_2237.PNG,1242,2208,wide_sign,734,501,1051,714\r\n","dark_IMG_2236.PNG,1242,2208,wide_sign,738,619,999,801\r\n","dark_IMG_2235.PNG,1242,2208,wide_sign,703,801,873,923\r\n","dark_IMG_2229.PNG,1242,2208,wide_sign,521,584,803,780\r\n","dark_IMG_2249.PNG,1242,2208,wide_sign,256,419,489,600\r\n","dark_IMG_2251.PNG,1242,2208,wide_sign,294,262,712,701\r\n","dark_IMG_2244.PNG,1242,2208,wide_sign,255,554,494,762\r\n","dark_IMG_2228.PNG,1242,2208,wide_sign,194,514,503,719\r\n","dark_IMG_2247.PNG,1242,2208,wide_sign,210,684,398,844\r\n","dark_IMG_2245.PNG,1242,2208,wide_sign,699,362,986,736\r\n","dark_IMG_2250.PNG,1242,2208,wide_sign,556,614,754,773\r\n","dark_IMG_2263.PNG,1242,2208,wide_sign,416,319,742,562\r\n","dark_IMG_2257.PNG,1242,2208,wide_sign,255,666,460,843\r\n","dark_IMG_2243.PNG,1242,2208,wide_sign,599,514,864,723\r\n","dark_IMG_2242.PNG,1242,2208,wide_sign,859,743,1013,858\r\n","dark_IMG_2259.PNG,1242,2208,wide_sign,712,341,1129,688\r\n","dark_IMG_2262.PNG,1242,2208,wide_sign,407,454,621,614\r\n","dark_IMG_2258.PNG,1242,2208,wide_sign,517,542,913,826\r\n","dark_IMG_2273.PNG,1242,2208,wide_sign,292,537,568,765\r\n","dark_IMG_2277.PNG,1242,2208,long_sign,706,881,771,997\r\n","dark_IMG_2254.PNG,1242,2208,wide_sign,721,732,848,820\r\n","dark_IMG_2256.PNG,1242,2208,wide_sign,494,775,683,925\r\n","dark_IMG_2269.PNG,1242,2208,wide_sign,549,429,860,654\r\n","dark_IMG_2274.PNG,1242,2208,wide_sign,164,580,621,971\r\n","dark_IMG_2252.PNG,1242,2208,wide_sign,247,588,573,841\r\n","dark_IMG_2255.PNG,1242,2208,wide_sign,770,560,981,724\r\n","dark_IMG_2281.PNG,1242,2208,wide_sign,234,443,426,589\r\n","dark_IMG_2281.PNG,1242,2208,long_sign,864,526,991,753\r\n","dark_IMG_2283.PNG,1242,2208,wide_sign,516
,632,855,867\r\n","dark_IMG_2268.PNG,1242,2208,wide_sign,592,485,811,670\r\n","dark_IMG_2264.PNG,1242,2208,wide_sign,212,201,638,584\r\n","dark_IMG_2282.PNG,1242,2208,long_sign,501,581,729,949\r\n","dark_IMG_2280.PNG,1242,2208,wide_sign,617,641,719,721\r\n","dark_IMG_2280.PNG,1242,2208,long_sign,949,673,1038,810\r\n","dark_IMG_2289.PNG,1242,2208,wide_sign,432,411,622,556\r\n","dark_IMG_2285.PNG,1242,2208,wide_sign,573,344,808,516\r\n","dark_IMG_2302.PNG,1242,2208,wide_sign,424,513,559,646\r\n","dark_IMG_2294.PNG,1242,2208,wide_sign,832,517,967,636\r\n","dark_IMG_2296.PNG,1242,2208,wide_sign,811,668,992,827\r\n","dark_IMG_2284.PNG,1242,2208,wide_sign,665,579,832,687\r\n","dark_IMG_2295.PNG,1242,2208,wide_sign,687,342,974,575\r\n","dark_IMG_2292.PNG,1242,2208,wide_sign,92,394,492,692\r\n","dark_IMG_2278.PNG,1242,2208,long_sign,777,570,925,825\r\n","dark_IMG_2291.PNG,1242,2208,wide_sign,349,419,685,642\r\n","dark_IMG_2308.PNG,1242,2208,wide_sign,362,729,503,843\r\n","dark_IMG_2324.PNG,1242,2208,wide_sign,292,186,664,498\r\n","dark_IMG_2303.PNG,1242,2208,wide_sign,637,384,835,516\r\n","dark_IMG_2304.PNG,1242,2208,wide_sign,720,757,863,863\r\n","dark_IMG_2290.PNG,1242,2208,wide_sign,394,511,600,668\r\n","dark_IMG_2305.PNG,1242,2208,wide_sign,355,551,630,770\r\n","dark_IMG_2320.PNG,1242,2208,wide_sign,621,280,947,584\r\n","dark_IMG_2309.PNG,1242,2208,wide_sign,359,519,626,753\r\n","dark_IMG_2315.PNG,1242,2208,wide_sign,542,136,851,397\r\n","dark_IMG_2323.PNG,1242,2208,wide_sign,411,234,691,455\r\n","dark_IMG_2311.PNG,1242,2208,wide_sign,336,479,517,613\r\n","dark_IMG_2322.PNG,1242,2208,wide_sign,498,592,663,716\r\n","dark_IMG_2318.PNG,1242,2208,wide_sign,765,215,963,383\r\n","dark_IMG_2327.PNG,1242,2208,wide_sign,794,317,1108,562\r\n","dark_IMG_2328.PNG,1242,2208,wide_sign,238,315,542,577\r\n","dark_IMG_2326.PNG,1242,2208,wide_sign,596,153,902,423\r\n","dark_IMG_2325.PNG,1242,2208,wide_sign,602,468,874,668\r\n","dark_IMG_2329.PNG,1242,2208,wide_sign,639,596,781,717\r\n","
wide_sign(101).jpg,640,480,wide_sign,37,17,635,432\r\n","long_sign(1).jpg,499,331,long_sign,248,126,293,214\r\n","long_sign(4).jpg,699,393,long_sign,348,71,441,213\r\n","long_sign(3).jpg,560,420,long_sign,253,70,491,353\r\n","long_sign(5).jpg,546,819,long_sign,162,28,454,502\r\n","long_sign(2).jpg,560,364,long_sign,355,8,485,236\r\n","long_sign(8).jpg,580,326,long_sign,209,70,328,244\r\n","long_sign(10).jpg,650,650,long_sign,224,194,414,462\r\n","long_sign(9).jpg,580,434,long_sign,327,94,399,211\r\n","long_sign(12).jpg,433,650,long_sign,1,45,431,582\r\n","long_sign(11).jpg,650,650,long_sign,176,82,496,537\r\n","long_sign(14).jpg,970,722,long_sign,69,157,260,515\r\n","long_sign(14).jpg,970,722,long_sign,350,325,399,431\r\n","long_sign(19).jpg,236,330,long_sign,129,1,235,174\r\n","long_sign(17).jpg,583,327,long_sign,255,62,347,215\r\n","long_sign(18).jpg,583,327,long_sign,336,78,425,213\r\n","long_sign(16).jpg,630,481,long_sign,344,2,619,481\r\n","long_sign(15).jpg,600,400,long_sign,469,128,540,385\r\n","long_sign(15).jpg,600,400,wide_sign,68,58,239,195\r\n","long_sign(21).jpg,583,327,long_sign,244,79,339,230\r\n","long_sign(22).jpg,683,300,long_sign,437,72,515,238\r\n","long_sign(20).jpg,583,327,long_sign,333,32,401,151\r\n","long_sign(23).jpg,330,495,long_sign,34,83,295,457\r\n","long_sign(26).jpg,550,367,long_sign,3,65,262,361\r\n","long_sign(24).jpg,580,387,long_sign,163,21,499,343\r\n","long_sign(25).jpg,550,361,long_sign,183,48,359,317\r\n","long_sign(28).jpg,1312,1227,long_sign,718,186,929,617\r\n","long_sign(27).jpg,550,367,long_sign,265,174,290,216\r\n","long_sign(30).jpg,1223,819,long_sign,556,158,763,533\r\n","long_sign(29).jpg,600,423,long_sign,480,109,594,290\r\n","long_sign(32).jpg,970,722,long_sign,69,163,269,513\r\n","long_sign(31).jpg,1223,746,long_sign,687,144,852,320\r\n","long_sign(33).jpg,600,450,long_sign,325,52,547,367\r\n","long_sign(37).jpg,600,401,long_sign,176,57,248,162\r\n","long_sign(35).jpg,600,400,long_sign,225,5,266,79\r\n","long_sign(
36).jpg,600,400,long_sign,90,1,198,175\r\n","long_sign(34).jpg,550,367,long_sign,240,63,310,207\r\n","long_sign(38).jpg,600,361,long_sign,62,86,187,266\r\n","long_sign(38).jpg,600,361,long_sign,438,97,512,212\r\n","long_sign(40).jpg,600,400,long_sign,200,16,443,362\r\n","long_sign(39).jpg,500,297,long_sign,57,40,175,238\r\n","long_sign(39).jpg,500,297,long_sign,316,26,457,263\r\n","long_sign(45).jpg,690,443,long_sign,405,80,544,353\r\n","long_sign(43).jpg,680,382,long_sign,286,69,391,244\r\n","long_sign(44).jpg,530,362,long_sign,282,9,362,143\r\n","long_sign(42).jpg,960,540,long_sign,381,98,585,406\r\n","long_sign(41).jpg,300,225,long_sign,109,47,185,150\r\n","long_sign(46).jpg,600,1066,long_sign,319,588,427,778\r\n","long_sign(48).jpg,600,902,long_sign,258,128,596,765\r\n","long_sign(49).jpg,600,337,long_sign,242,50,348,211\r\n","long_sign(47).jpg,640,360,long_sign,239,7,416,343\r\n","long_sign(51).jpg,600,360,long_sign,51,88,129,217\r\n","long_sign(51).jpg,600,360,wide_sign,138,2,244,67\r\n","long_sign(50).jpg,400,602,long_sign,267,1,400,264\r\n","long_sign(52).jpg,550,825,long_sign,357,89,473,370\r\n","long_sign(56).jpg,600,338,long_sign,109,2,155,84\r\n","long_sign(53).jpg,790,1237,long_sign,521,462,637,674\r\n","long_sign(53).jpg,790,1237,wide_sign,178,169,480,405\r\n","long_sign(55).jpg,680,498,long_sign,87,123,231,370\r\n","long_sign(55).jpg,680,498,long_sign,443,142,575,354\r\n","long_sign(54).jpg,1500,1139,long_sign,36,11,318,493\r\n","long_sign(58).jpg,480,448,long_sign,180,84,254,243\r\n","long_sign(57).jpg,300,452,long_sign,61,5,252,306\r\n","long_sign(59).jpg,720,530,long_sign,499,28,669,295\r\n","long_sign(63).jpg,348,236,long_sign,162,37,202,121\r\n","long_sign(62).jpg,803,558,long_sign,36,11,369,526\r\n","long_sign(61).jpg,620,349,long_sign,91,1,260,250\r\n","long_sign(60).jpg,290,237,long_sign,111,62,222,177\r\n","long_sign(66).jpg,300,329,long_sign,89,31,216,268\r\n","long_sign(64).jpg,500,334,long_sign,205,100,312,255\r\n","long_sign(65).jpg,302,5
40,long_sign,110,96,228,336\r\n","long_sign(67).jpg,601,401,long_sign,310,7,458,244\r\n","long_sign(68).jpg,318,240,long_sign,119,51,199,155\r\n","long_sign(72).jpg,600,400,long_sign,64,42,233,283\r\n","long_sign(72).jpg,600,400,long_sign,422,75,524,249\r\n","long_sign(69).jpg,580,435,long_sign,381,193,454,302\r\n","long_sign(69).jpg,580,435,wide_sign,189,67,299,155\r\n","long_sign(71).jpg,700,394,long_sign,282,46,408,249\r\n","long_sign(70).jpg,600,402,long_sign,150,14,415,402\r\n","long_sign(73).jpg,680,473,long_sign,39,6,136,168\r\n","long_sign(73).jpg,680,473,long_sign,254,53,314,160\r\n","long_sign(75).jpg,680,420,long_sign,103,27,185,174\r\n","long_sign(74).jpg,680,415,wide_sign,118,40,254,150\r\n","long_sign(74).jpg,680,415,long_sign,547,269,609,373\r\n","long_sign(79).jpg,1203,675,long_sign,507,243,650,499\r\n","long_sign(76).jpg,500,281,long_sign,336,3,427,141\r\n","long_sign(78).jpg,818,532,long_sign,228,6,553,460\r\n","long_sign(77).jpg,740,493,long_sign,438,38,667,376\r\n","long_sign(84).jpg,655,435,long_sign,263,96,344,232\r\n","long_sign(82).jpg,600,407,long_sign,412,14,549,238\r\n","long_sign(80).jpg,821,627,long_sign,539,105,713,353\r\n","long_sign(81).jpg,550,550,long_sign,418,30,472,131\r\n","long_sign(83).jpg,800,1020,long_sign,343,66,627,583\r\n","long_sign(88).jpg,560,756,long_sign,309,23,543,465\r\n","long_sign(87).jpg,997,468,long_sign,594,1,768,363\r\n","long_sign(86).jpg,596,637,long_sign,141,76,359,433\r\n","long_sign(85).jpg,500,350,long_sign,28,8,224,337\r\n","long_sign(85).jpg,500,350,long_sign,279,8,481,332\r\n","long_sign(89).jpg,318,424,long_sign,55,44,263,369\r\n","long_sign(90).jpg,540,960,long_sign,106,112,493,795\r\n","long_sign(95).jpg,640,853,long_sign,191,432,396,707\r\n","long_sign(93).jpg,759,524,long_sign,258,45,598,405\r\n","long_sign(92).jpg,662,565,long_sign,242,20,474,535\r\n","long_sign(94).jpg,650,487,long_sign,227,45,316,234\r\n","long_sign(94).jpg,650,487,long_sign,322,211,373,279\r\n","long_sign(91).jpg,1000,667,lon
g_sign,560,166,730,498\r\n","long_sign(91).jpg,1000,667,wide_sign,221,16,407,153\r\n","long_sign(96).jpg,4032,3024,long_sign,1585,1158,2591,1708\r\n","long_sign(97).jpg,4032,3024,long_sign,2222,964,3635,1839\r\n","long_sign(102).jpg,600,300,long_sign,109,11,187,127\r\n","long_sign(102).jpg,600,300,long_sign,420,9,484,130\r\n","long_sign(98).jpg,4032,3024,long_sign,1091,1758,1691,2283\r\n","long_sign(101).jpg,500,375,long_sign,149,16,399,330\r\n","long_sign(99).jpg,700,526,long_sign,188,84,304,368\r\n","long_sign(100).jpg,400,559,long_sign,31,15,172,276\r\n","wide_sign(1).jpg,818,532,wide_sign,199,84,586,386\r\n","wide_sign(2).jpg,441,293,wide_sign,36,20,406,258\r\n","wide_sign(4).jpg,600,450,wide_sign,108,96,444,309\r\n","wide_sign(5).jpg,600,678,wide_sign,191,162,333,280\r\n","wide_sign(6).jpg,790,1237,wide_sign,178,177,485,408\r\n","wide_sign(7).jpg,600,400,wide_sign,285,48,410,138\r\n","wide_sign(3).jpg,345,221,wide_sign,104,42,273,150\r\n","wide_sign(9).jpg,640,476,wide_sign,212,129,386,231\r\n","wide_sign(8).jpg,1174,724,wide_sign,53,90,836,583\r\n","wide_sign(11).jpg,980,653,wide_sign,126,165,443,439\r\n","wide_sign(12).jpg,600,362,wide_sign,139,1,247,68\r\n","wide_sign(10).jpg,600,370,wide_sign,64,85,379,306\r\n","wide_sign(13).jpg,600,400,wide_sign,324,24,442,108\r\n","wide_sign(14).jpg,700,525,wide_sign,275,71,399,166\r\n","wide_sign(16).jpg,790,1237,wide_sign,180,179,478,386\r\n","wide_sign(17).jpg,640,509,wide_sign,235,1,436,133\r\n","wide_sign(17).jpg,640,509,wide_sign,96,216,174,277\r\n","wide_sign(15).jpg,511,304,wide_sign,10,1,481,271\r\n","wide_sign(19).jpg,500,745,wide_sign,219,300,390,423\r\n","wide_sign(21).jpg,510,340,wide_sign,275,23,374,92\r\n","wide_sign(20).jpg,600,736,wide_sign,141,121,351,278\r\n","wide_sign(18).jpg,580,327,wide_sign,253,5,368,82\r\n","wide_sign(24).jpg,392,551,wide_sign,80,52,222,161\r\n","wide_sign(23).jpg,700,400,wide_sign,138,74,529,305\r\n","wide_sign(22).jpg,580,435,wide_sign,193,69,301,156\r\n","wide_sign(27).jpg,960
,720,wide_sign,508,88,632,186\r\n","wide_sign(26).jpg,1280,720,wide_sign,253,134,918,659\r\n","wide_sign(25).jpg,1400,1050,wide_sign,398,170,623,338\r\n","wide_sign(28).jpg,4128,2322,wide_sign,445,76,3233,1970\r\n","wide_sign(31).jpg,900,675,wide_sign,198,251,578,481\r\n","wide_sign(30).jpg,900,675,wide_sign,152,197,646,481\r\n","wide_sign(29).jpg,600,450,wide_sign,289,19,413,110\r\n","wide_sign(32).jpg,900,437,wide_sign,550,152,639,211\r\n","wide_sign(35).jpg,300,200,wide_sign,149,4,216,44\r\n","wide_sign(33).jpg,900,675,wide_sign,50,142,428,508\r\n","wide_sign(36).jpg,680,415,wide_sign,109,37,256,147\r\n","wide_sign(34).jpg,267,172,wide_sign,4,28,248,171\r\n","wide_sign(37).jpg,1280,720,wide_sign,168,24,1035,688\r\n","wide_sign(38).jpg,820,493,wide_sign,482,20,707,175\r\n","wide_sign(39).jpg,530,298,wide_sign,108,21,405,274\r\n","wide_sign(42).jpg,500,375,wide_sign,8,3,500,363\r\n","wide_sign(40).jpg,740,555,wide_sign,304,65,564,249\r\n","wide_sign(41).jpg,500,342,wide_sign,39,57,437,279\r\n","wide_sign(43).jpg,648,392,wide_sign,33,18,616,379\r\n","wide_sign(46).jpg,600,450,wide_sign,180,92,458,295\r\n","wide_sign(44).jpg,740,555,wide_sign,103,5,297,159\r\n","wide_sign(47).jpg,818,532,wide_sign,184,106,591,396\r\n","wide_sign(45).jpg,740,986,wide_sign,246,184,592,452\r\n","wide_sign(49).jpg,540,304,wide_sign,81,5,460,252\r\n","wide_sign(48).jpg,818,532,wide_sign,180,59,633,463\r\n","wide_sign(51).jpg,533,400,wide_sign,198,139,374,265\r\n","wide_sign(50).jpg,745,400,wide_sign,160,20,636,351\r\n","wide_sign(53).jpg,640,400,wide_sign,35,52,620,336\r\n","wide_sign(52).jpg,711,400,wide_sign,78,23,622,389\r\n","wide_sign(55).jpg,533,400,wide_sign,172,62,280,146\r\n","wide_sign(59).jpg,450,300,wide_sign,105,76,359,230\r\n","wide_sign(58).jpg,720,540,wide_sign,156,145,452,409\r\n","wide_sign(56).jpg,599,400,wide_sign,365,68,480,175\r\n","wide_sign(57).jpg,695,400,wide_sign,221,27,474,212\r\n","wide_sign(54).jpg,533,400,wide_sign,143,92,390,250\r\n","wide_sign(61).jpg,500,
500,wide_sign,38,33,469,473\r\n","wide_sign(60).jpg,960,1280,wide_sign,487,243,662,371\r\n","wide_sign(63).jpg,650,487,wide_sign,384,110,451,183\r\n","wide_sign(62).jpg,500,687,wide_sign,284,55,470,213\r\n","wide_sign(65).jpg,5184,3456,wide_sign,1642,624,4525,2590\r\n","wide_sign(64).jpg,550,323,wide_sign,86,8,179,66\r\n","wide_sign(67).jpg,1490,1049,wide_sign,322,230,1038,771\r\n","wide_sign(66).jpg,800,450,wide_sign,260,189,521,346\r\n","wide_sign(69).jpg,720,960,wide_sign,167,169,358,327\r\n","wide_sign(68).jpg,5184,3456,wide_sign,642,532,1958,1524\r\n","wide_sign(71).jpg,740,900,wide_sign,61,206,473,453\r\n","wide_sign(73).jpg,960,720,wide_sign,510,79,636,189\r\n","wide_sign(70).jpg,886,506,wide_sign,453,29,772,338\r\n","wide_sign(72).jpg,344,622,wide_sign,28,61,225,196\r\n","wide_sign(75).jpg,420,386,wide_sign,43,90,350,298\r\n","wide_sign(77).jpg,700,400,wide_sign,136,77,530,321\r\n","wide_sign(76).jpg,187,269,wide_sign,86,5,157,60\r\n","wide_sign(74).jpg,896,748,wide_sign,281,208,576,457\r\n","wide_sign(78).jpg,1440,1080,wide_sign,734,569,982,736\r\n","wide_sign(79).jpg,3024,4032,wide_sign,978,1120,1970,1962\r\n","wide_sign(80).jpg,494,318,wide_sign,19,18,477,300\r\n","wide_sign(84).jpg,640,480,wide_sign,216,75,343,172\r\n","wide_sign(83).jpg,500,308,wide_sign,32,40,355,246\r\n","wide_sign(82).jpg,500,366,wide_sign,113,104,369,315\r\n","wide_sign(81).jpg,800,600,wide_sign,274,94,453,182\r\n","wide_sign(88).jpg,600,450,wide_sign,87,71,447,318\r\n","wide_sign(85).jpg,900,900,wide_sign,271,369,608,604\r\n","wide_sign(92).jpg,512,332,wide_sign,98,45,383,227\r\n","wide_sign(91).jpg,617,300,wide_sign,200,57,369,183\r\n","wide_sign(93).jpg,512,342,wide_sign,87,77,409,279\r\n","wide_sign(89).jpg,1200,627,wide_sign,103,23,970,592\r\n","wide_sign(90).jpg,450,580,wide_sign,139,50,318,176\r\n","wide_sign(95).jpg,512,384,wide_sign,115,100,324,300\r\n","wide_sign(97).jpg,600,400,wide_sign,12,74,188,202\r\n","wide_sign(94).jpg,300,185,wide_sign,43,22,261,169\r\n","wide_sign
(96).jpg,716,524,wide_sign,1,4,689,456\r\n","wide_sign(99).jpg,886,506,wide_sign,176,61,653,420\r\n","wide_sign(98).jpg,600,400,wide_sign,71,57,241,190\r\n","long_sign(103).jpg,451,660,long_sign,115,124,336,478\r\n","wide_sign(100).jpg,400,372,wide_sign,94,75,153,115\r\n","dark_long_sign(36).jpg,600,400,long_sign,90,1,198,175\r\n","dark_long_sign(45).jpg,690,443,long_sign,405,80,544,353\r\n","dark_long_sign(38).jpg,600,361,long_sign,62,86,187,266\r\n","dark_long_sign(38).jpg,600,361,long_sign,438,97,512,212\r\n","dark_long_sign(31).jpg,1223,746,long_sign,687,144,852,320\r\n","dark_long_sign(37).jpg,600,401,long_sign,176,57,248,162\r\n","dark_long_sign(42).jpg,960,540,long_sign,381,98,585,406\r\n","dark_long_sign(33).jpg,600,450,long_sign,325,52,547,367\r\n","dark_long_sign(48).jpg,600,902,long_sign,258,128,596,765\r\n","dark_long_sign(34).jpg,550,367,long_sign,240,63,310,207\r\n","dark_long_sign(44).jpg,530,362,long_sign,282,9,362,143\r\n","dark_long_sign(32).jpg,970,722,long_sign,69,163,269,513\r\n","dark_long_sign(39).jpg,500,297,long_sign,57,40,175,238\r\n","dark_long_sign(39).jpg,500,297,long_sign,316,26,457,263\r\n","dark_long_sign(47).jpg,640,360,long_sign,239,7,416,343\r\n","dark_long_sign(49).jpg,600,337,long_sign,242,50,348,211\r\n","dark_long_sign(97).jpg,3024,4032,long_sign,2222,964,3635,1839\r\n","dark_long_sign(35).jpg,600,400,long_sign,225,5,266,79\r\n","dark_long_sign(43).jpg,680,382,long_sign,286,69,391,244\r\n","dark_long_sign(40).jpg,600,400,long_sign,200,16,443,362\r\n","dark_long_sign(46).jpg,600,1066,long_sign,319,588,427,778\r\n","dark_long_sign(65).jpg,302,540,long_sign,110,96,228,336\r\n","dark_long_sign(62).jpg,803,558,long_sign,36,11,369,526\r\n","dark_long_sign(28).jpg,1312,1227,long_sign,718,186,929,617\r\n","dark_long_sign(15).jpg,600,400,long_sign,469,128,540,385\r\n","dark_long_sign(15).jpg,600,400,wide_sign,68,58,239,195\r\n","dark_long_sign(67).jpg,601,401,long_sign,310,7,458,244\r\n","dark_wide_sign(68).jpg,5184,3456,wide_sign,642,5
32,1958,1524\r\n","dark_long_sign(96).jpg,3024,4032,long_sign,1585,1158,2591,1708\r\n","dark_long_sign(12).jpg,433,650,long_sign,1,45,431,582\r\n","dark_long_sign(21).jpg,583,327,long_sign,244,79,339,230\r\n","dark_long_sign(22).jpg,683,300,long_sign,437,72,515,238\r\n","dark_long_sign(14).jpg,970,722,long_sign,69,157,260,515\r\n","dark_long_sign(14).jpg,970,722,long_sign,350,325,399,431\r\n","dark_long_sign(24).jpg,580,387,long_sign,163,21,499,343\r\n","dark_long_sign(69).jpg,580,435,long_sign,381,193,454,302\r\n","dark_long_sign(69).jpg,580,435,wide_sign,189,67,299,155\r\n","dark_long_sign(20).jpg,583,327,long_sign,333,32,401,151\r\n","dark_long_sign(30).jpg,1223,819,long_sign,556,158,763,533\r\n","dark_long_sign(29).jpg,600,423,long_sign,480,109,594,290\r\n","dark_long_sign(64).jpg,500,334,long_sign,205,100,312,255\r\n","dark_long_sign(70).jpg,600,402,long_sign,150,14,415,402\r\n","dark_long_sign(17).jpg,583,327,long_sign,255,62,347,215\r\n","dark_long_sign(60).jpg,290,237,long_sign,111,62,222,177\r\n","dark_long_sign(63).jpg,348,236,long_sign,162,37,202,121\r\n","dark_long_sign(23).jpg,330,495,long_sign,34,83,295,457\r\n","dark_long_sign(16).jpg,630,481,long_sign,344,2,619,481\r\n","dark_long_sign(68).jpg,318,240,long_sign,119,51,199,155\r\n","dark_long_sign(66).jpg,300,329,long_sign,89,31,216,268\r\n","dark_long_sign(10).jpg,650,650,long_sign,224,194,414,462\r\n","dark_long_sign(61).jpg,620,349,long_sign,91,1,260,250\r\n","dark_long_sign(19).jpg,236,330,long_sign,129,1,235,174\r\n","dark_long_sign(11).jpg,650,650,long_sign,176,82,496,537\r\n","dark_long_sign(9).jpg,580,434,long_sign,327,94,399,211\r\n","dark_wide_sign(28).jpg,4128,2322,wide_sign,445,76,3233,1970\r\n","dark_long_sign(59).jpg,720,530,long_sign,499,28,669,295\r\n","dark_wide_sign(65).jpg,5184,3456,wide_sign,1642,624,4525,2590\r\n","dark_long_sign(57).jpg,300,452,long_sign,61,5,252,306\r\n","dark_long_sign(55).jpg,680,498,long_sign,87,123,231,370\r\n","dark_long_sign(55).jpg,680,498,long_sign,443,1
42,575,354\r\n","dark_wide_sign(14).jpg,700,525,wide_sign,275,71,399,166\r\n","dark_long_sign(56).jpg,600,338,long_sign,109,2,155,84\r\n","dark_wide_sign(19).jpg,500,745,wide_sign,219,300,390,423\r\n","dark_long_sign(53).jpg,790,1237,long_sign,521,462,637,674\r\n","dark_long_sign(53).jpg,790,1237,wide_sign,178,169,480,405\r\n","dark_long_sign(54).jpg,1500,1139,long_sign,36,11,318,493\r\n","dark_long_sign(51).jpg,600,360,long_sign,51,88,129,217\r\n","dark_long_sign(51).jpg,600,360,wide_sign,138,2,244,67\r\n","dark_wide_sign(18).jpg,580,327,wide_sign,253,5,368,82\r\n","dark_wide_sign(12).jpg,600,362,wide_sign,139,1,247,68\r\n","dark_long_sign(98).jpg,3024,4032,long_sign,1091,1758,1691,2283\r\n","dark_wide_sign(17).jpg,640,509,wide_sign,235,1,436,133\r\n","dark_wide_sign(17).jpg,640,509,wide_sign,96,216,174,277\r\n","dark_wide_sign(11).jpg,980,653,wide_sign,126,165,443,439\r\n","dark_long_sign(50).jpg,400,602,long_sign,267,1,400,264\r\n","dark_long_sign(58).jpg,480,448,long_sign,180,84,254,243\r\n","dark_wide_sign(16).jpg,790,1237,wide_sign,180,179,478,386\r\n","dark_long_sign(52).jpg,550,825,long_sign,357,89,473,370\r\n","dark_long_sign(101).jpg,500,375,long_sign,149,16,399,330\r\n","dark_long_sign(82).jpg,600,407,long_sign,412,14,549,238\r\n","dark_long_sign(84).jpg,655,435,long_sign,263,96,344,232\r\n","dark_wide_sign(79).jpg,3024,4032,wide_sign,978,1120,1970,1962\r\n","dark_wide_sign(35).jpg,300,200,wide_sign,149,4,216,44\r\n","dark_wide_sign(6).jpg,790,1237,wide_sign,178,177,485,408\r\n","dark_long_sign(78).jpg,818,532,long_sign,228,6,553,460\r\n","dark_long_sign(74).jpg,680,415,wide_sign,118,40,254,150\r\n","dark_long_sign(74).jpg,680,415,long_sign,547,269,609,373\r\n","dark_long_sign(89).jpg,318,424,long_sign,55,44,263,369\r\n","dark_long_sign(77).jpg,740,493,long_sign,438,38,667,376\r\n","dark_long_sign(73).jpg,680,473,long_sign,39,6,136,168\r\n","dark_long_sign(73).jpg,680,473,long_sign,254,53,314,160\r\n","dark_long_sign(79).jpg,1203,675,long_sign,507,243,650
,499\r\n","dark_long_sign(83).jpg,800,1020,long_sign,343,66,627,583\r\n","dark_long_sign(88).jpg,560,756,long_sign,309,23,543,465\r\n","dark_long_sign(87).jpg,997,468,long_sign,594,1,768,363\r\n","dark_wide_sign(36).jpg,680,415,wide_sign,109,37,256,147\r\n","dark_long_sign(71).jpg,700,394,long_sign,282,46,408,249\r\n","dark_wide_sign(10).jpg,600,370,wide_sign,64,85,379,306\r\n","dark_long_sign(95).jpg,640,853,long_sign,191,432,396,707\r\n","dark_long_sign(72).jpg,600,400,long_sign,64,42,233,283\r\n","dark_long_sign(72).jpg,600,400,long_sign,422,75,524,249\r\n","dark_long_sign(85).jpg,500,350,long_sign,28,8,224,337\r\n","dark_long_sign(85).jpg,500,350,long_sign,279,8,481,332\r\n","dark_long_sign(92).jpg,662,565,long_sign,242,20,474,535\r\n","dark_long_sign(90).jpg,540,960,long_sign,106,112,493,795\r\n","dark_long_sign(103).jpg,451,660,long_sign,115,124,336,478\r\n","dark_wide_sign(7).jpg,600,400,wide_sign,285,48,410,138\r\n","dark_long_sign(99).jpg,700,526,long_sign,188,84,304,368\r\n","dark_wide_sign(1).jpg,818,532,wide_sign,199,84,586,386\r\n","dark_wide_sign(15).jpg,511,304,wide_sign,10,1,481,271\r\n","dark_wide_sign(32).jpg,900,437,wide_sign,550,152,639,211\r\n","dark_wide_sign(2).jpg,441,293,wide_sign,36,20,406,258\r\n","dark_long_sign(102).jpg,600,300,long_sign,109,11,187,127\r\n","dark_long_sign(102).jpg,600,300,long_sign,420,9,484,130\r\n","dark_long_sign(91).jpg,1000,667,long_sign,560,166,730,498\r\n","dark_long_sign(91).jpg,1000,667,wide_sign,221,16,407,153\r\n","dark_wide_sign(8).jpg,1174,724,wide_sign,53,90,836,583\r\n","dark_long_sign(75).jpg,680,420,long_sign,103,27,185,174\r\n","dark_wide_sign(13).jpg,600,400,wide_sign,324,24,442,108\r\n","dark_long_sign(76).jpg,500,281,long_sign,336,3,427,141\r\n","dark_wide_sign(37).jpg,1280,720,wide_sign,168,24,1035,688\r\n","dark_wide_sign(4).jpg,600,450,wide_sign,108,96,444,309\r\n","dark_wide_sign(9).jpg,640,476,wide_sign,212,129,386,231\r\n","dark_long_sign(81).jpg,550,550,long_sign,418,30,472,131\r\n","dark_wid
e_sign(33).jpg,900,675,wide_sign,50,142,428,508\r\n","dark_wide_sign(3).jpg,345,221,wide_sign,104,42,273,150\r\n","dark_long_sign(93).jpg,759,524,long_sign,258,45,598,405\r\n","dark_long_sign(86).jpg,596,637,long_sign,141,76,359,433\r\n","dark_long_sign(80).jpg,821,627,long_sign,539,105,713,353\r\n","dark_long_sign(94).jpg,650,487,long_sign,227,45,316,234\r\n","dark_long_sign(94).jpg,650,487,long_sign,322,211,373,279\r\n","dark_long_sign(100).jpg,400,559,long_sign,31,15,172,276\r\n","dark_wide_sign(22).jpg,580,435,wide_sign,193,69,301,156\r\n","dark_wide_sign(27).jpg,960,720,wide_sign,508,88,632,186\r\n","dark_wide_sign(82).jpg,500,366,wide_sign,113,104,369,315\r\n","dark_wide_sign(74).jpg,896,748,wide_sign,281,208,576,457\r\n","dark_wide_sign(21).jpg,510,340,wide_sign,275,23,374,92\r\n","dark_wide_sign(69).jpg,720,960,wide_sign,167,169,358,327\r\n","dark_wide_sign(38).jpg,820,493,wide_sign,482,20,707,175\r\n","dark_long_sign(2).jpg,560,364,long_sign,355,8,485,236\r\n","dark_wide_sign(39).jpg,530,298,wide_sign,108,21,405,274\r\n","dark_wide_sign(48).jpg,818,532,wide_sign,180,59,633,463\r\n","dark_wide_sign(47).jpg,818,532,wide_sign,184,106,591,396\r\n","dark_wide_sign(53).jpg,640,400,wide_sign,35,52,620,336\r\n","dark_wide_sign(23).jpg,700,400,wide_sign,138,74,529,305\r\n","dark_wide_sign(49).jpg,540,304,wide_sign,81,5,460,252\r\n","dark_wide_sign(77).jpg,700,400,wide_sign,136,77,530,321\r\n","dark_wide_sign(42).jpg,500,375,wide_sign,8,3,500,363\r\n","dark_wide_sign(43).jpg,648,392,wide_sign,33,18,616,379\r\n","dark_wide_sign(55).jpg,533,400,wide_sign,172,62,280,146\r\n","dark_wide_sign(99).jpg,886,506,wide_sign,176,61,653,420\r\n","dark_wide_sign(20).jpg,600,736,wide_sign,141,121,351,278\r\n","dark_wide_sign(52).jpg,711,400,wide_sign,78,23,622,389\r\n","dark_wide_sign(76).jpg,187,269,wide_sign,86,5,157,60\r\n","dark_wide_sign(31).jpg,900,675,wide_sign,198,251,578,481\r\n","dark_wide_sign(66).jpg,800,450,wide_sign,260,189,521,346\r\n","dark_wide_sign(24).jpg,392,551
,wide_sign,80,52,222,161\r\n","dark_wide_sign(58).jpg,720,540,wide_sign,156,145,452,409\r\n","dark_wide_sign(34).jpg,267,172,wide_sign,4,28,248,171\r\n","dark_wide_sign(46).jpg,600,450,wide_sign,180,92,458,295\r\n","dark_wide_sign(80).jpg,494,318,wide_sign,19,18,477,300\r\n","dark_wide_sign(44).jpg,740,555,wide_sign,103,5,297,159\r\n","dark_wide_sign(67).jpg,1490,1049,wide_sign,322,230,1038,771\r\n","dark_wide_sign(61).jpg,500,500,wide_sign,38,33,469,473\r\n","dark_wide_sign(72).jpg,344,622,wide_sign,28,61,225,196\r\n","dark_wide_sign(78).jpg,1440,1080,wide_sign,734,569,982,736\r\n","dark_wide_sign(64).jpg,550,323,wide_sign,86,8,179,66\r\n","dark_wide_sign(45).jpg,740,986,wide_sign,246,184,592,452\r\n","dark_wide_sign(26).jpg,1280,720,wide_sign,253,134,918,659\r\n","dark_wide_sign(83).jpg,500,308,wide_sign,32,40,355,246\r\n","dark_wide_sign(56).jpg,599,400,wide_sign,365,68,480,175\r\n","dark_wide_sign(41).jpg,500,342,wide_sign,39,57,437,279\r\n","dark_wide_sign(40).jpg,740,555,wide_sign,304,65,564,249\r\n","dark_wide_sign(62).jpg,500,687,wide_sign,284,55,470,213\r\n","dark_wide_sign(63).jpg,650,487,wide_sign,384,110,451,183\r\n","dark_wide_sign(29).jpg,600,450,wide_sign,289,19,413,110\r\n","dark_wide_sign(50).jpg,745,400,wide_sign,160,20,636,351\r\n","dark_wide_sign(98).jpg,600,400,wide_sign,71,57,241,190\r\n","dark_wide_sign(100).jpg,400,372,wide_sign,94,75,153,115\r\n","dark_wide_sign(30).jpg,900,675,wide_sign,152,197,646,481\r\n","dark_wide_sign(51).jpg,533,400,wide_sign,198,139,374,265\r\n","dark_wide_sign(57).jpg,695,400,wide_sign,221,27,474,212\r\n","dark_wide_sign(75).jpg,420,386,wide_sign,43,90,350,298\r\n","dark_wide_sign(54).jpg,533,400,wide_sign,143,92,390,250\r\n","dark_long_sign(8).jpg,580,326,long_sign,209,70,328,244\r\n","dark_wide_sign(84).jpg,640,480,wide_sign,216,75,343,172\r\n","dark_wide_sign(85).jpg,900,900,wide_sign,271,369,608,604\r\n","dark_long_sign(3).jpg,560,420,long_sign,253,70,491,353\r\n","dark_wide_sign(93).jpg,512,342,wide_sign,87,77,
409,279\r\n","dark_long_sign(4).jpg,699,393,long_sign,348,71,441,213\r\n","dark_wide_sign(95).jpg,512,384,wide_sign,115,100,324,300\r\n","dark_wide_sign(88).jpg,600,450,wide_sign,87,71,447,318\r\n","dark_wide_sign(89).jpg,1200,627,wide_sign,103,23,970,592\r\n","dark_wide_sign(86).jpg,1024,768,wide_sign,236,172,765,568\r\n","dark_wide_sign(91).jpg,617,300,wide_sign,200,57,369,183\r\n","dark_wide_sign(97).jpg,600,400,wide_sign,12,74,188,202\r\n","dark_long_sign(7).jpg,620,473,long_sign,355,149,454,303\r\n","dark_long_sign(5).jpg,546,819,long_sign,162,28,454,502\r\n","dark_wide_sign(94).jpg,300,185,wide_sign,43,22,261,169\r\n","dark_wide_sign(92).jpg,512,332,wide_sign,98,45,383,227\r\n","dark_wide_sign(96).jpg,716,524,wide_sign,1,4,689,456\r\n","dark_long_sign(6).jpg,620,501,long_sign,238,172,340,354\r\n","dark_wide_sign(90).jpg,450,580,wide_sign,139,50,318,176\r\n","bright_long_sign(12).jpg,433,650,long_sign,1,45,431,582\r\n","bright_long_sign(15).jpg,600,400,long_sign,469,128,540,385\r\n","bright_long_sign(15).jpg,600,400,wide_sign,68,58,239,195\r\n","bright_long_sign(9).jpg,580,434,long_sign,327,94,399,211\r\n","bright_long_sign(17).jpg,583,327,long_sign,255,62,347,215\r\n","bright_long_sign(14).jpg,970,722,long_sign,69,157,260,515\r\n","bright_long_sign(14).jpg,970,722,long_sign,350,325,399,431\r\n","bright_long_sign(10).jpg,650,650,long_sign,224,194,414,462\r\n","bright_long_sign(16).jpg,630,481,long_sign,344,2,619,481\r\n","bright_long_sign(11).jpg,650,650,long_sign,176,82,496,537\r\n","bright_long_sign(27).jpg,550,367,long_sign,265,174,290,216\r\n","bright_long_sign(33).jpg,600,450,long_sign,325,52,547,367\r\n","bright_long_sign(34).jpg,550,367,long_sign,240,63,310,207\r\n","bright_long_sign(24).jpg,580,387,long_sign,163,21,499,343\r\n","bright_long_sign(29).jpg,600,423,long_sign,480,109,594,290\r\n","bright_long_sign(25).jpg,550,361,long_sign,183,48,359,317\r\n","bright_long_sign(21).jpg,583,327,long_sign,244,79,339,230\r\n","bright_long_sign(18).jpg,583,327,lo
ng_sign,336,78,425,213\r\n","bright_long_sign(19).jpg,236,330,long_sign,129,1,235,174\r\n","bright_long_sign(22).jpg,683,300,long_sign,437,72,515,238\r\n","bright_long_sign(20).jpg,583,327,long_sign,333,32,401,151\r\n","bright_long_sign(26).jpg,550,367,long_sign,3,65,262,361\r\n","bright_long_sign(32).jpg,970,722,long_sign,69,163,269,513\r\n","bright_long_sign(28).jpg,1312,1227,long_sign,718,186,929,617\r\n","bright_long_sign(23).jpg,330,495,long_sign,34,83,295,457\r\n","bright_long_sign(30).jpg,1223,819,long_sign,556,158,763,533\r\n","bright_long_sign(48).jpg,600,902,long_sign,258,128,596,765\r\n","bright_long_sign(46).jpg,600,1066,long_sign,319,588,427,778\r\n","bright_long_sign(44).jpg,530,362,long_sign,282,9,362,143\r\n","bright_long_sign(45).jpg,690,443,long_sign,405,80,544,353\r\n","bright_long_sign(41).jpg,300,225,long_sign,109,47,185,150\r\n","bright_long_sign(43).jpg,680,382,long_sign,286,69,391,244\r\n","bright_long_sign(36).jpg,600,400,long_sign,90,1,198,175\r\n","bright_long_sign(47).jpg,640,360,long_sign,239,7,416,343\r\n","bright_long_sign(38).jpg,600,361,long_sign,62,86,187,266\r\n","bright_long_sign(38).jpg,600,361,long_sign,438,97,512,212\r\n","bright_long_sign(49).jpg,600,337,long_sign,242,50,348,211\r\n","bright_long_sign(40).jpg,600,400,long_sign,200,16,443,362\r\n","bright_long_sign(35).jpg,600,400,long_sign,225,5,266,79\r\n","bright_long_sign(42).jpg,960,540,long_sign,381,98,585,406\r\n","bright_long_sign(57).jpg,300,452,long_sign,61,5,252,306\r\n","bright_long_sign(51).jpg,600,360,long_sign,51,88,129,217\r\n","bright_long_sign(51).jpg,600,360,wide_sign,138,2,244,67\r\n","bright_long_sign(59).jpg,720,530,long_sign,499,28,669,295\r\n","bright_long_sign(54).jpg,1500,1139,long_sign,36,11,318,493\r\n","bright_long_sign(55).jpg,680,498,long_sign,87,123,231,370\r\n","bright_long_sign(55).jpg,680,498,long_sign,443,142,575,354\r\n","bright_long_sign(50).jpg,400,602,long_sign,267,1,400,264\r\n","bright_long_sign(52).jpg,550,825,long_sign,357,89,473,370\
r\n","bright_long_sign(64).jpg,500,334,long_sign,205,100,312,255\r\n","bright_long_sign(62).jpg,803,558,long_sign,36,11,369,526\r\n","bright_long_sign(63).jpg,348,236,long_sign,162,37,202,121\r\n","bright_long_sign(61).jpg,620,349,long_sign,91,1,260,250\r\n","bright_long_sign(60).jpg,290,237,long_sign,111,62,222,177\r\n","bright_long_sign(53).jpg,790,1237,long_sign,521,462,637,674\r\n","bright_long_sign(53).jpg,790,1237,wide_sign,178,169,480,405\r\n","bright_long_sign(58).jpg,480,448,long_sign,180,84,254,243\r\n","bright_long_sign(56).jpg,600,338,long_sign,109,2,155,84\r\n","bright_long_sign(71).jpg,700,394,long_sign,282,46,408,249\r\n","bright_long_sign(72).jpg,600,400,long_sign,64,42,233,283\r\n","bright_long_sign(72).jpg,600,400,long_sign,422,75,524,249\r\n","bright_long_sign(69).jpg,580,435,long_sign,381,193,454,302\r\n","bright_long_sign(69).jpg,580,435,wide_sign,189,67,299,155\r\n","bright_long_sign(66).jpg,300,329,long_sign,89,31,216,268\r\n","bright_long_sign(68).jpg,318,240,long_sign,119,51,199,155\r\n","bright_long_sign(73).jpg,680,473,long_sign,39,6,136,168\r\n","bright_long_sign(73).jpg,680,473,long_sign,254,53,314,160\r\n","bright_long_sign(65).jpg,302,540,long_sign,110,96,228,336\r\n","bright_long_sign(75).jpg,680,420,long_sign,103,27,185,174\r\n","bright_long_sign(76).jpg,500,281,long_sign,336,3,427,141\r\n","bright_long_sign(67).jpg,601,401,long_sign,310,7,458,244\r\n","bright_long_sign(77).jpg,740,493,long_sign,438,38,667,376\r\n","bright_long_sign(78).jpg,818,532,long_sign,228,6,553,460\r\n","bright_long_sign(80).jpg,821,627,long_sign,539,105,713,353\r\n","bright_long_sign(82).jpg,600,407,long_sign,412,14,549,238\r\n","bright_long_sign(81).jpg,550,550,long_sign,418,30,472,131\r\n","bright_long_sign(83).jpg,800,1020,long_sign,343,66,627,583\r\n","bright_long_sign(97).jpg,3024,4032,long_sign,2222,964,3635,1839\r\n","bright_long_sign(89).jpg,318,424,long_sign,55,44,263,369\r\n","bright_long_sign(91).jpg,1000,667,long_sign,560,166,730,498\r\n","bright_
long_sign(91).jpg,1000,667,wide_sign,221,16,407,153\r\n","bright_long_sign(95).jpg,640,853,long_sign,191,432,396,707\r\n","bright_long_sign(99).jpg,700,526,long_sign,188,84,304,368\r\n","bright_long_sign(85).jpg,500,350,long_sign,28,8,224,337\r\n","bright_long_sign(85).jpg,500,350,long_sign,279,8,481,332\r\n","bright_long_sign(86).jpg,596,637,long_sign,141,76,359,433\r\n","bright_long_sign(101).jpg,500,375,long_sign,149,16,399,330\r\n","bright_long_sign(93).jpg,759,524,long_sign,258,45,598,405\r\n","bright_long_sign(96).jpg,3024,4032,long_sign,1585,1158,2591,1708\r\n","bright_long_sign(100).jpg,400,559,long_sign,31,15,172,276\r\n","bright_long_sign(94).jpg,650,487,long_sign,227,45,316,234\r\n","bright_long_sign(94).jpg,650,487,long_sign,322,211,373,279\r\n","bright_long_sign(90).jpg,540,960,long_sign,106,112,493,795\r\n","bright_long_sign(88).jpg,560,756,long_sign,309,23,543,465\r\n","bright_long_sign(92).jpg,662,565,long_sign,242,20,474,535\r\n","bright_long_sign(102).jpg,600,300,long_sign,109,11,187,127\r\n","bright_long_sign(102).jpg,600,300,long_sign,420,9,484,130\r\n","bright_long_sign(87).jpg,997,468,long_sign,594,1,768,363\r\n","bright_wide_sign(13).jpg,600,400,wide_sign,324,24,442,108\r\n","bright_wide_sign(2).jpg,441,293,wide_sign,36,20,406,258\r\n","bright_wide_sign(8).jpg,1174,724,wide_sign,53,90,836,583\r\n","bright_wide_sign(1).jpg,818,532,wide_sign,199,84,586,386\r\n","bright_wide_sign(15).jpg,511,304,wide_sign,10,1,481,271\r\n","bright_wide_sign(17).jpg,640,509,wide_sign,235,1,436,133\r\n","bright_wide_sign(17).jpg,640,509,wide_sign,96,216,174,277\r\n","bright_wide_sign(9).jpg,640,476,wide_sign,212,129,386,231\r\n","bright_long_sign(103).jpg,451,660,long_sign,115,124,336,478\r\n","bright_wide_sign(7).jpg,600,400,wide_sign,285,48,410,138\r\n","bright_wide_sign(14).jpg,700,525,wide_sign,275,71,399,166\r\n","bright_wide_sign(11).jpg,980,653,wide_sign,126,165,443,439\r\n","bright_wide_sign(10).jpg,600,370,wide_sign,64,85,379,306\r\n","bright_wide_sign(33)
.jpg,900,675,wide_sign,50,142,428,508\r\n","bright_wide_sign(34).jpg,267,172,wide_sign,4,28,248,171\r\n","bright_wide_sign(27).jpg,960,720,wide_sign,508,88,632,186\r\n","bright_wide_sign(24).jpg,392,551,wide_sign,80,52,222,161\r\n","bright_wide_sign(28).jpg,4128,2322,wide_sign,445,76,3233,1970\r\n","bright_wide_sign(26).jpg,1280,720,wide_sign,253,134,918,659\r\n","bright_wide_sign(31).jpg,900,675,wide_sign,198,251,578,481\r\n","bright_wide_sign(21).jpg,510,340,wide_sign,275,23,374,92\r\n","bright_wide_sign(30).jpg,900,675,wide_sign,152,197,646,481\r\n","bright_wide_sign(18).jpg,580,327,wide_sign,253,5,368,82\r\n","bright_wide_sign(20).jpg,600,736,wide_sign,141,121,351,278\r\n","bright_wide_sign(25).jpg,1400,1050,wide_sign,398,170,623,338\r\n","bright_wide_sign(19).jpg,500,745,wide_sign,219,300,390,423\r\n","bright_wide_sign(22).jpg,580,435,wide_sign,193,69,301,156\r\n","bright_wide_sign(32).jpg,900,437,wide_sign,550,152,639,211\r\n","bright_wide_sign(23).jpg,700,400,wide_sign,138,74,529,305\r\n","bright_wide_sign(39).jpg,530,298,wide_sign,108,21,405,274\r\n","bright_wide_sign(42).jpg,500,375,wide_sign,8,3,500,363\r\n","bright_wide_sign(37).jpg,1280,720,wide_sign,168,24,1035,688\r\n","bright_wide_sign(43).jpg,648,392,wide_sign,33,18,616,379\r\n","bright_wide_sign(41).jpg,500,342,wide_sign,39,57,437,279\r\n","bright_wide_sign(38).jpg,820,493,wide_sign,482,20,707,175\r\n","bright_wide_sign(36).jpg,680,415,wide_sign,109,37,256,147\r\n","bright_wide_sign(51).jpg,533,400,wide_sign,198,139,374,265\r\n","bright_wide_sign(50).jpg,745,400,wide_sign,160,20,636,351\r\n","bright_wide_sign(48).jpg,818,532,wide_sign,180,59,633,463\r\n","bright_wide_sign(49).jpg,540,304,wide_sign,81,5,460,252\r\n","bright_wide_sign(47).jpg,818,532,wide_sign,184,106,591,396\r\n","bright_wide_sign(46).jpg,600,450,wide_sign,180,92,458,295\r\n","bright_wide_sign(56).jpg,599,400,wide_sign,365,68,480,175\r\n","bright_wide_sign(57).jpg,695,400,wide_sign,221,27,474,212\r\n","bright_wide_sign(58).jpg,720,54
0,wide_sign,156,145,452,409\r\n","bright_wide_sign(55).jpg,533,400,wide_sign,172,62,280,146\r\n","bright_wide_sign(59).jpg,450,300,wide_sign,105,76,359,230\r\n","bright_wide_sign(63).jpg,650,487,wide_sign,384,110,451,183\r\n","bright_wide_sign(61).jpg,500,500,wide_sign,38,33,469,473\r\n","bright_wide_sign(60).jpg,960,1280,wide_sign,487,243,662,371\r\n","bright_wide_sign(54).jpg,533,400,wide_sign,143,92,390,250\r\n","bright_wide_sign(53).jpg,640,400,wide_sign,35,52,620,336\r\n","bright_wide_sign(52).jpg,711,400,wide_sign,78,23,622,389\r\n","bright_wide_sign(62).jpg,500,687,wide_sign,284,55,470,213\r\n","bright_wide_sign(69).jpg,720,960,wide_sign,167,169,358,327\r\n","bright_wide_sign(67).jpg,1490,1049,wide_sign,322,230,1038,771\r\n","bright_wide_sign(65).jpg,5184,3456,wide_sign,1642,624,4525,2590\r\n","bright_wide_sign(77).jpg,700,400,wide_sign,136,77,530,321\r\n","bright_wide_sign(78).jpg,1440,1080,wide_sign,734,569,982,736\r\n","bright_wide_sign(74).jpg,896,748,wide_sign,281,208,576,457\r\n","bright_wide_sign(73).jpg,960,720,wide_sign,510,79,636,189\r\n","bright_wide_sign(79).jpg,3024,4032,wide_sign,978,1120,1970,1962\r\n","bright_wide_sign(71).jpg,740,900,wide_sign,61,206,473,453\r\n","bright_wide_sign(64).jpg,550,323,wide_sign,86,8,179,66\r\n","bright_wide_sign(80).jpg,494,318,wide_sign,19,18,477,300\r\n","bright_wide_sign(72).jpg,344,622,wide_sign,28,61,225,196\r\n","bright_wide_sign(70).jpg,886,506,wide_sign,453,29,772,338\r\n","bright_wide_sign(68).jpg,5184,3456,wide_sign,642,532,1958,1524\r\n","bright_wide_sign(66).jpg,800,450,wide_sign,260,189,521,346\r\n","bright_wide_sign(86).jpg,1024,768,wide_sign,236,172,765,568\r\n","bright_wide_sign(82).jpg,500,366,wide_sign,113,104,369,315\r\n","bright_wide_sign(88).jpg,600,450,wide_sign,87,71,447,318\r\n","bright_wide_sign(83).jpg,500,308,wide_sign,32,40,355,246\r\n","bright_wide_sign(85).jpg,900,900,wide_sign,271,369,608,604\r\n","bright_wide_sign(96).jpg,716,524,wide_sign,1,4,689,456\r\n","bright_wide_sign(89).jpg,
1200,627,wide_sign,103,23,970,592\r\n","bright_wide_sign(91).jpg,617,300,wide_sign,200,57,369,183\r\n","bright_wide_sign(81).jpg,800,600,wide_sign,274,94,453,182\r\n","bright_wide_sign(92).jpg,512,332,wide_sign,98,45,383,227\r\n","bright_wide_sign(84).jpg,640,480,wide_sign,216,75,343,172\r\n","bright_wide_sign(95).jpg,512,384,wide_sign,115,100,324,300\r\n","bright_wide_sign(97).jpg,600,400,wide_sign,12,74,188,202\r\n","bright_wide_sign(87).jpg,1364,550,wide_sign,404,145,876,504\r\n","bright_wide_sign(93).jpg,512,342,wide_sign,87,77,409,279\r\n","bright_wide_sign(94).jpg,300,185,wide_sign,43,22,261,169\r\n","bright_long_sign(5).jpg,546,819,long_sign,162,28,454,502\r\n","bright_long_sign(6).jpg,620,501,long_sign,238,172,340,354\r\n","bright_wide_sign(98).jpg,600,400,wide_sign,71,57,241,190\r\n","bright_wide_sign(99).jpg,886,506,wide_sign,176,61,653,420\r\n","bright_long_sign(4).jpg,699,393,long_sign,348,71,441,213\r\n","bright_long_sign(8).jpg,580,326,long_sign,209,70,328,244\r\n","bright_wide_sign(100).jpg,400,372,wide_sign,94,75,153,115\r\n","bright_long_sign(7).jpg,620,473,long_sign,355,149,454,303\r\n","sign1.png,929,582,wide_sign,695,43,871,155\r\n","sign2.png,979,628,long_sign,819,47,957,262\r\n","sign3.png,1042,766,long_sign,861,45,1021,291\r\n","sign4.png,721,677,long_sign,566,58,691,244\r\n","sign5.png,704,571,long_sign,600,34,682,223\r\n","sign6.png,652,700,long_sign,131,63,276,315\r\n","sign7.png,523,713,long_sign,357,61,468,244\r\n","sign8.png,783,698,long_sign,607,133,709,297\r\n","sign9.png,423,565,long_sign,320,105,391,252\r\n","sign10.png,749,644,long_sign,584,48,728,237\r\n","sign11.png,847,518,wide_sign,660,44,782,161\r\n","sign12.png,718,650,wide_sign,405,65,602,182\r\n","sign13.png,826,603,wide_sign,78,71,208,169\r\n","sign14.png,492,695,long_sign,326,81,435,288\r\n","FudanPed00001.jpg,559,536,adult,160,182,302,431\r\n","FudanPed00001.jpg,559,536,adult,420,171,535,486\r\n","FudanPed00002.jpg,455,414,adult,68,93,191,380\r\n","FudanPed00003.jpg,479,4
45,adult,293,135,447,421\r\n","FudanPed00004.jpg,396,397,adult,168,60,324,338\r\n","FudanPed00004.jpg,396,397,adult,9,61,48,180\r\n","FudanPed00005.jpg,335,344,adult,188,59,320,336\r\n","FudanPed00005.jpg,335,344,adult,2,53,40,158\r\n","FudanPed00006.jpg,385,426,adult,208,108,346,385\r\n","FudanPed00006.jpg,385,426,adult,2,108,87,384\r\n","FudanPed00007.jpg,539,381,adult,112,69,218,346\r\n","FudanPed00007.jpg,539,381,adult,378,76,529,377\r\n","FudanPed00007.jpg,539,381,adult,317,108,347,192\r\n","FudanPed00008.jpg,388,454,adult,228,158,370,436\r\n","FudanPed00008.jpg,388,454,adult,39,179,115,363\r\n","FudanPed00009.jpg,465,441,adult,306,138,453,430\r\n","FudanPed00009.jpg,465,441,adult,157,124,298,398\r\n","FudanPed00010.jpg,411,393,adult,281,90,401,374\r\n","FudanPed00011.jpg,459,420,adult,278,112,438,394\r\n","FudanPed00012.jpg,468,384,adult,159,71,295,361\r\n","FudanPed00012.jpg,468,384,adult,328,58,439,327\r\n","FudanPed00013.jpg,652,498,adult,389,193,554,476\r\n","FudanPed00014.jpg,456,383,adult,234,86,405,367\r\n","FudanPed00015.jpg,336,349,adult,19,43,174,327\r\n","FudanPed00016.jpg,544,425,adult,80,86,205,383\r\n","FudanPed00016.jpg,544,425,adult,279,94,400,361\r\n","FudanPed00016.jpg,544,425,adult,411,101,495,378\r\n","FudanPed00017.jpg,266,342,adult,115,48,244,332\r\n","FudanPed00018.jpg,253,323,adult,20,19,126,304\r\n","FudanPed00019.jpg,497,442,adult,7,135,142,389\r\n","FudanPed00019.jpg,497,442,adult,194,123,339,421\r\n","FudanPed00020.jpg,555,417,adult,339,99,508,381\r\n","FudanPed00021.jpg,490,378,adult,324,76,470,367\r\n","FudanPed00021.jpg,490,378,adult,234,93,268,167\r\n","FudanPed00021.jpg,490,378,adult,47,95,69,160\r\n","FudanPed00022.jpg,536,465,adult,112,169,209,449\r\n","FudanPed00022.jpg,536,465,adult,397,171,514,455\r\n","FudanPed00023.jpg,376,378,adult,202,84,341,366\r\n","FudanPed00024.jpg,479,378,adult,103,84,270,370\r\n","FudanPed00025.jpg,425,369,adult,226,69,396,354\r\n","FudanPed00025.jpg,425,369,adult,44,87,94,256\r\n","FudanPed00025
.jpg,425,369,adult,128,74,189,262\r\n","FudanPed00025.jpg,425,369,adult,180,53,228,255\r\n","FudanPed00025.jpg,425,369,adult,213,73,273,262\r\n","FudanPed00025.jpg,425,369,adult,314,62,383,266\r\n","FudanPed00026.jpg,440,427,adult,44,78,150,389\r\n","FudanPed00026.jpg,440,427,adult,185,89,279,373\r\n","FudanPed00027.jpg,302,363,adult,104,45,253,328\r\n","FudanPed00028.jpg,317,345,adult,7,16,149,303\r\n","FudanPed00028.jpg,317,345,adult,186,18,316,321\r\n","FudanPed00029.jpg,435,404,adult,246,72,368,351\r\n","FudanPed00030.jpg,462,387,adult,59,85,208,375\r\n","FudanPed00031.jpg,569,430,adult,298,122,444,408\r\n","FudanPed00032.jpg,608,474,adult,456,162,573,453\r\n","FudanPed00033.jpg,396,357,adult,165,53,304,346\r\n","FudanPed00034.jpg,309,351,adult,111,37,254,330\r\n","FudanPed00035.jpg,292,349,adult,64,39,195,331\r\n","FudanPed00036.jpg,1017,444,adult,131,98,279,394\r\n","FudanPed00036.jpg,1017,444,adult,802,120,937,381\r\n","FudanPed00036.jpg,1017,444,adult,244,133,330,363\r\n","FudanPed00036.jpg,1017,444,adult,726,145,789,345\r\n","FudanPed00037.jpg,423,361,adult,259,63,415,349\r\n","FudanPed00037.jpg,423,361,adult,190,98,209,170\r\n","FudanPed00038.jpg,422,346,adult,219,44,400,331\r\n","FudanPed00039.jpg,493,487,adult,137,137,199,311\r\n","FudanPed00039.jpg,493,487,adult,231,130,331,410\r\n","FudanPed00040.jpg,550,482,adult,201,157,279,437\r\n","FudanPed00040.jpg,550,482,adult,279,172,377,430\r\n","FudanPed00041.jpg,552,507,adult,351,161,439,437\r\n","FudanPed00041.jpg,552,507,adult,284,181,357,439\r\n","FudanPed00041.jpg,552,507,adult,42,186,110,388\r\n","FudanPed00042.jpg,588,547,adult,159,186,250,470\r\n","FudanPed00042.jpg,588,547,adult,288,156,392,449\r\n","FudanPed00043.jpg,493,518,adult,64,139,147,426\r\n","FudanPed00043.jpg,493,518,adult,156,139,248,435\r\n","FudanPed00043.jpg,493,518,adult,261,128,358,431\r\n","FudanPed00044.jpg,521,494,adult,246,170,340,462\r\n","FudanPed00044.jpg,521,494,adult,337,175,426,464\r\n","FudanPed00044.jpg,521,494,adult,37,1
29,181,492\r\n","FudanPed00044.jpg,521,494,adult,436,157,501,402\r\n","FudanPed00045.jpg,487,538,adult,198,198,294,481\r\n","FudanPed00045.jpg,487,538,adult,319,201,404,499\r\n","FudanPed00045.jpg,487,538,adult,395,212,469,495\r\n","FudanPed00046.jpg,567,438,adult,178,123,271,410\r\n","FudanPed00046.jpg,567,438,adult,299,114,370,335\r\n","FudanPed00046.jpg,567,438,adult,446,104,507,315\r\n","FudanPed00046.jpg,567,438,adult,521,119,565,258\r\n","FudanPed00047.jpg,472,520,adult,272,198,371,482\r\n","FudanPed00047.jpg,472,520,adult,31,195,90,427\r\n","FudanPed00047.jpg,472,520,adult,384,185,430,298\r\n","FudanPed00048.jpg,585,559,adult,6,242,84,535\r\n","FudanPed00048.jpg,585,559,adult,79,231,144,455\r\n","FudanPed00048.jpg,585,559,adult,158,219,251,507\r\n","FudanPed00048.jpg,585,559,adult,460,263,580,544\r\n","FudanPed00049.jpg,447,512,adult,127,123,213,412\r\n","FudanPed00049.jpg,447,512,adult,214,106,317,392\r\n","FudanPed00049.jpg,447,512,adult,315,107,435,387\r\n","FudanPed00050.jpg,580,516,adult,33,203,135,484\r\n","FudanPed00050.jpg,580,516,adult,291,205,369,412\r\n","FudanPed00051.jpg,448,501,adult,126,191,218,476\r\n","FudanPed00052.jpg,461,504,adult,206,189,308,492\r\n","FudanPed00053.jpg,541,580,adult,94,169,195,432\r\n","FudanPed00053.jpg,541,580,adult,389,150,499,453\r\n","FudanPed00054.jpg,533,498,adult,97,135,182,418\r\n","FudanPed00054.jpg,533,498,adult,287,114,358,332\r\n","FudanPed00054.jpg,533,498,adult,364,121,437,329\r\n","FudanPed00055.jpg,390,555,adult,148,178,277,530\r\n","FudanPed00056.jpg,565,581,adult,439,247,537,522\r\n","FudanPed00056.jpg,565,581,adult,200,259,270,529\r\n","FudanPed00056.jpg,565,581,adult,252,256,374,528\r\n","FudanPed00057.jpg,482,519,adult,22,91,143,406\r\n","FudanPed00057.jpg,482,519,adult,131,93,205,358\r\n","FudanPed00057.jpg,482,519,adult,237,83,312,356\r\n","FudanPed00057.jpg,482,519,adult,343,117,388,267\r\n","FudanPed00057.jpg,482,519,adult,386,116,427,250\r\n","FudanPed00058.jpg,522,479,adult,145,92,261,463\r\n",
"FudanPed00058.jpg,522,479,adult,43,123,64,211\r\n","FudanPed00058.jpg,522,479,adult,289,102,341,280\r\n","FudanPed00058.jpg,522,479,adult,353,97,406,276\r\n","FudanPed00058.jpg,522,479,adult,427,126,446,182\r\n","FudanPed00058.jpg,522,479,adult,462,126,474,171\r\n","FudanPed00058.jpg,522,479,adult,473,133,489,183\r\n","FudanPed00058.jpg,522,479,adult,492,121,517,202\r\n","FudanPed00059.jpg,576,369,adult,35,54,135,330\r\n","FudanPed00059.jpg,576,369,adult,159,36,252,329\r\n","FudanPed00059.jpg,576,369,adult,270,45,380,328\r\n","FudanPed00060.jpg,517,440,adult,188,141,291,427\r\n","FudanPed00060.jpg,517,440,adult,293,130,384,430\r\n","FudanPed00060.jpg,517,440,adult,395,149,498,427\r\n","FudanPed00061.jpg,547,407,adult,165,70,266,353\r\n","FudanPed00061.jpg,547,407,adult,343,86,429,381\r\n","FudanPed00061.jpg,547,407,adult,406,85,531,387\r\n","FudanPed00062.jpg,414,341,adult,175,25,255,321\r\n","FudanPed00062.jpg,414,341,adult,291,34,377,330\r\n","FudanPed00063.jpg,486,359,adult,29,38,114,332\r\n","FudanPed00063.jpg,486,359,adult,156,42,262,329\r\n","FudanPed00063.jpg,486,359,adult,312,54,330,99\r\n","FudanPed00063.jpg,486,359,adult,297,54,303,73\r\n","FudanPed00063.jpg,486,359,adult,305,53,310,73\r\n","FudanPed00064.jpg,546,420,adult,141,98,240,387\r\n","FudanPed00064.jpg,546,420,adult,317,101,400,395\r\n","FudanPed00064.jpg,546,420,adult,400,112,517,407\r\n","FudanPed00065.jpg,504,411,adult,29,69,147,382\r\n","FudanPed00065.jpg,504,411,adult,147,84,242,385\r\n","FudanPed00065.jpg,504,411,adult,234,70,311,393\r\n","FudanPed00065.jpg,504,411,adult,342,88,430,351\r\n","FudanPed00065.jpg,504,411,adult,13,58,64,183\r\n","FudanPed00066.jpg,360,359,adult,248,50,329,351\r\n","FudanPed00067.jpg,453,416,adult,62,79,184,375\r\n","FudanPed00069.jpg,354,379,adult,215,70,315,363\r\n","FudanPed00068.jpg,375,398,adult,249,66,318,361\r\n","FudanPed00070.jpg,390,382,adult,288,70,370,364\r\n","FudanPed00071.jpg,363,353,adult,121,44,246,330\r\n","FudanPed00071.jpg,363,353,adult,256,37
,339,348\r\n","FudanPed00071.jpg,363,353,adult,226,52,255,119\r\n","FudanPed00072.jpg,375,349,adult,48,40,139,332\r\n","FudanPed00073.jpg,516,392,adult,56,66,159,360\r\n","FudanPed00073.jpg,516,392,adult,193,59,289,352\r\n","FudanPed00074.jpg,421,370,adult,48,51,126,351\r\n","FudanPed00074.jpg,421,370,adult,178,61,256,345\r\n","PennPed00001.jpg,612,406,adult,83,66,197,353\r\n","PennPed00001.jpg,612,406,adult,265,75,355,338\r\n","PennPed00001.jpg,612,406,adult,403,38,501,348\r\n","PennPed00001.jpg,612,406,adult,514,65,610,318\r\n","PennPed00001.jpg,612,406,adult,206,25,266,196\r\n","PennPed00002.jpg,745,378,adult,9,84,97,304\r\n","PennPed00002.jpg,745,378,adult,83,45,165,298\r\n","PennPed00002.jpg,745,378,adult,268,84,370,298\r\n","PennPed00002.jpg,745,378,adult,363,33,516,335\r\n","PennPed00002.jpg,745,378,adult,576,75,713,362\r\n","PennPed00002.jpg,745,378,adult,517,92,608,300\r\n","PennPed00003.jpg,670,418,adult,8,33,174,365\r\n","PennPed00003.jpg,670,418,adult,452,64,531,353\r\n","PennPed00003.jpg,670,418,adult,531,91,661,381\r\n","PennPed00004.jpg,786,436,adult,67,61,170,410\r\n","PennPed00004.jpg,786,436,adult,155,91,253,385\r\n","PennPed00004.jpg,786,436,adult,228,94,325,391\r\n","PennPed00004.jpg,786,436,adult,417,101,516,391\r\n","PennPed00004.jpg,786,436,adult,595,112,734,397\r\n","PennPed00005.jpg,767,454,adult,68,130,159,405\r\n","PennPed00005.jpg,767,454,adult,165,154,252,410\r\n","PennPed00005.jpg,767,454,adult,263,144,415,439\r\n","PennPed00005.jpg,767,454,adult,540,169,666,440\r\n","PennPed00005.jpg,767,454,adult,602,156,701,396\r\n","PennPed00006.jpg,631,436,adult,111,72,199,412\r\n","PennPed00006.jpg,631,436,adult,231,96,329,388\r\n","PennPed00006.jpg,631,436,adult,443,108,608,435\r\n","PennPed00006.jpg,631,436,adult,3,102,69,397\r\n","PennPed00007.jpg,570,412,adult,42,60,165,350\r\n","PennPed00007.jpg,570,412,adult,127,83,201,359\r\n","PennPed00007.jpg,570,412,adult,187,76,309,369\r\n","PennPed00007.jpg,570,412,adult,299,76,381,400\r\n","PennPed000
07.jpg,570,412,adult,426,68,527,343\r\n","PennPed00008.jpg,452,403,adult,47,71,177,366\r\n","PennPed00008.jpg,452,403,adult,199,77,305,329\r\n","PennPed00008.jpg,452,403,adult,256,74,394,345\r\n","PennPed00008.jpg,452,403,adult,343,53,421,278\r\n","PennPed00009.jpg,648,413,adult,47,85,147,292\r\n","PennPed00009.jpg,648,413,adult,163,87,240,348\r\n","PennPed00009.jpg,648,413,adult,225,86,300,307\r\n","PennPed00009.jpg,648,413,adult,316,81,391,369\r\n","PennPed00009.jpg,648,413,adult,375,100,471,351\r\n","PennPed00009.jpg,648,413,adult,470,94,559,395\r\n","PennPed00009.jpg,648,413,adult,533,90,605,376\r\n","PennPed00010.jpg,750,495,adult,18,59,103,340\r\n","PennPed00010.jpg,750,495,adult,186,56,262,337\r\n","PennPed00010.jpg,750,495,adult,309,141,437,493\r\n","PennPed00010.jpg,750,495,adult,475,76,619,451\r\n","PennPed00010.jpg,750,495,adult,392,58,485,421\r\n","PennPed00010.jpg,750,495,adult,563,36,681,394\r\n","PennPed00011.jpg,508,376,adult,92,62,236,344\r\n","PennPed00011.jpg,508,376,adult,242,52,301,355\r\n","PennPed00012.jpg,565,429,adult,114,122,212,403\r\n","PennPed00013.jpg,564,385,adult,98,79,210,359\r\n","PennPed00013.jpg,564,385,adult,259,96,364,359\r\n","PennPed00013.jpg,564,385,adult,384,81,498,355\r\n","PennPed00013.jpg,564,385,adult,215,91,257,193\r\n","PennPed00014.jpg,542,368,adult,127,40,206,229\r\n","PennPed00014.jpg,542,368,adult,209,69,312,337\r\n","PennPed00014.jpg,542,368,adult,354,60,448,337\r\n","PennPed00014.jpg,542,368,adult,286,41,353,225\r\n","PennPed00015.jpg,906,438,adult,123,98,280,414\r\n","PennPed00015.jpg,906,438,adult,286,91,448,378\r\n","PennPed00015.jpg,906,438,adult,749,94,834,382\r\n","PennPed00016.jpg,683,399,adult,86,51,146,362\r\n","PennPed00016.jpg,683,399,adult,236,93,329,382\r\n","PennPed00016.jpg,683,399,adult,468,66,579,367\r\n","PennPed00017.jpg,492,403,adult,115,50,204,387\r\n","PennPed00017.jpg,492,403,adult,259,49,398,336\r\n","PennPed00018.jpg,460,344,adult,65,32,145,327\r\n","PennPed00018.jpg,460,344,adult,181,69,
262,308\r\n","PennPed00019.jpg,734,404,adult,17,78,106,356\r\n","PennPed00019.jpg,734,404,adult,160,70,249,361\r\n","PennPed00019.jpg,734,404,adult,315,60,390,364\r\n","PennPed00019.jpg,734,404,adult,368,47,474,370\r\n","PennPed00019.jpg,734,404,adult,464,75,539,362\r\n","PennPed00019.jpg,734,404,adult,525,80,639,375\r\n","PennPed00019.jpg,734,404,adult,600,78,704,375\r\n","PennPed00020.jpg,721,428,adult,142,121,264,379\r\n","PennPed00020.jpg,721,428,adult,387,76,516,365\r\n","PennPed00020.jpg,721,428,adult,445,77,607,369\r\n","PennPed00021.jpg,712,436,adult,68,118,218,407\r\n","PennPed00021.jpg,712,436,adult,464,81,588,382\r\n","PennPed00021.jpg,712,436,adult,559,111,681,381\r\n","PennPed00022.jpg,925,486,adult,30,118,167,413\r\n","PennPed00022.jpg,925,486,adult,120,149,257,413\r\n","PennPed00022.jpg,925,486,adult,216,152,354,416\r\n","PennPed00022.jpg,925,486,adult,705,129,839,390\r\n","PennPed00022.jpg,925,486,adult,345,162,424,342\r\n","PennPed00023.jpg,536,382,adult,107,60,275,363\r\n","PennPed00024.jpg,409,384,adult,71,58,170,350\r\n","PennPed00025.jpg,450,334,adult,72,34,216,310\r\n","PennPed00025.jpg,450,334,adult,189,151,276,304\r\n","PennPed00025.jpg,450,334,adult,390,2,421,71\r\n","PennPed00026.jpg,530,410,adult,303,94,447,382\r\n","PennPed00026.jpg,530,410,adult,200,120,291,373\r\n","PennPed00027.jpg,492,391,adult,126,93,218,363\r\n","PennPed00027.jpg,492,391,adult,204,81,322,367\r\n","PennPed00027.jpg,492,391,adult,347,89,463,342\r\n","PennPed00028.jpg,416,368,adult,211,50,325,334\r\n","PennPed00029.jpg,672,418,adult,114,65,214,370\r\n","PennPed00029.jpg,672,418,adult,274,98,339,391\r\n","PennPed00029.jpg,672,418,adult,363,102,466,388\r\n","PennPed00029.jpg,672,418,adult,509,103,585,392\r\n","PennPed00030.jpg,479,354,adult,139,37,253,339\r\n","PennPed00030.jpg,479,354,adult,3,37,95,342\r\n","PennPed00030.jpg,479,354,adult,316,43,448,339\r\n","PennPed00031.jpg,487,340,adult,244,42,390,329\r\n","PennPed00032.jpg,374,344,adult,98,50,203,329\r\n","PennPed00
033.jpg,533,391,adult,32,37,126,370\r\n","PennPed00033.jpg,533,391,adult,110,50,214,376\r\n","PennPed00033.jpg,533,391,adult,392,70,519,368\r\n","PennPed00033.jpg,533,391,adult,274,149,389,333\r\n","PennPed00034.jpg,538,422,adult,108,58,289,391\r\n","PennPed00034.jpg,538,422,adult,302,63,405,344\r\n","PennPed00034.jpg,538,422,adult,369,67,493,353\r\n","PennPed00034.jpg,538,422,adult,181,63,278,374\r\n","PennPed00035.jpg,623,353,adult,169,30,275,331\r\n","PennPed00035.jpg,623,353,adult,367,54,541,333\r\n","PennPed00035.jpg,623,353,adult,121,33,208,322\r\n","PennPed00036.jpg,694,370,adult,56,62,164,330\r\n","PennPed00036.jpg,694,370,adult,514,34,636,328\r\n","PennPed00037.jpg,366,318,adult,229,24,328,307\r\n","PennPed00039.jpg,495,373,adult,326,73,411,360\r\n","PennPed00038.jpg,436,353,adult,232,39,368,316\r\n","PennPed00040.jpg,406,342,adult,203,42,318,330\r\n","PennPed00041.jpg,570,422,adult,353,99,467,380\r\n","PennPed00042.jpg,569,344,adult,273,37,379,312\r\n","PennPed00042.jpg,569,344,adult,403,36,472,310\r\n","PennPed00043.jpg,480,356,adult,189,47,297,334\r\n","PennPed00043.jpg,480,356,adult,332,63,434,328\r\n","PennPed00043.jpg,480,356,adult,142,78,199,254\r\n","PennPed00043.jpg,480,356,adult,282,78,328,253\r\n","PennPed00043.jpg,480,356,adult,430,73,456,154\r\n","PennPed00044.jpg,483,335,adult,71,36,170,329\r\n","PennPed00044.jpg,483,335,adult,253,63,336,282\r\n","PennPed00044.jpg,483,335,adult,332,54,408,276\r\n","PennPed00044.jpg,483,335,adult,181,76,225,193\r\n","PennPed00045.jpg,692,395,adult,70,64,193,365\r\n","PennPed00045.jpg,692,395,adult,226,65,321,360\r\n","PennPed00045.jpg,692,395,adult,324,67,436,347\r\n","PennPed00045.jpg,692,395,adult,526,45,639,348\r\n","PennPed00045.jpg,692,395,adult,425,67,485,217\r\n","PennPed00045.jpg,692,395,adult,494,62,548,214\r\n","PennPed00046.jpg,534,348,adult,21,58,111,323\r\n","PennPed00046.jpg,534,348,adult,318,22,420,319\r\n","PennPed00046.jpg,534,348,adult,421,62,441,119\r\n","PennPed00047.jpg,500,346,adult,116,57
,253,336\r\n","PennPed00047.jpg,500,346,adult,246,33,381,320\r\n","PennPed00047.jpg,500,346,adult,362,42,427,194\r\n","PennPed00048.jpg,423,380,adult,27,33,118,326\r\n","PennPed00048.jpg,423,380,adult,116,35,198,321\r\n","PennPed00048.jpg,423,380,adult,263,21,385,369\r\n","PennPed00049.jpg,452,350,adult,82,48,176,346\r\n","PennPed00049.jpg,452,350,adult,175,45,253,337\r\n","PennPed00049.jpg,452,350,adult,252,81,307,237\r\n","PennPed00049.jpg,452,350,adult,310,77,368,240\r\n","PennPed00050.jpg,419,315,adult,91,41,196,291\r\n","PennPed00051.jpg,723,411,adult,38,52,203,326\r\n","PennPed00051.jpg,723,411,adult,314,67,481,365\r\n","PennPed00051.jpg,723,411,adult,459,34,587,373\r\n","PennPed00051.jpg,723,411,adult,554,67,628,339\r\n","PennPed00052.jpg,682,525,adult,77,137,175,428\r\n","PennPed00052.jpg,682,525,adult,169,181,294,491\r\n","PennPed00052.jpg,682,525,adult,403,157,494,400\r\n","PennPed00052.jpg,682,525,adult,524,83,613,393\r\n","PennPed00052.jpg,682,525,adult,279,179,334,314\r\n","PennPed00053.jpg,377,344,adult,15,29,124,325\r\n","PennPed00053.jpg,377,344,adult,169,27,266,329\r\n","PennPed00054.jpg,324,334,adult,43,22,128,317\r\n","PennPed00055.jpg,417,353,adult,96,51,208,341\r\n","PennPed00055.jpg,417,353,adult,29,8,122,283\r\n","PennPed00056.jpg,618,420,adult,175,106,271,398\r\n","PennPed00056.jpg,618,420,adult,140,122,193,384\r\n","PennPed00056.jpg,618,420,adult,257,98,306,386\r\n","PennPed00057.jpg,371,364,adult,158,64,261,352\r\n","PennPed00057.jpg,371,364,adult,198,18,266,294\r\n","PennPed00058.jpg,509,380,adult,167,27,245,316\r\n","PennPed00058.jpg,509,380,adult,228,31,307,291\r\n","PennPed00058.jpg,509,380,adult,321,65,452,351\r\n","PennPed00058.jpg,509,380,adult,295,22,374,274\r\n","PennPed00059.jpg,582,362,adult,80,44,177,337\r\n","PennPed00059.jpg,582,362,adult,381,36,529,323\r\n","PennPed00060.jpg,505,351,adult,43,23,142,326\r\n","PennPed00060.jpg,505,351,adult,189,21,283,230\r\n","PennPed00060.jpg,505,351,adult,117,28,191,226\r\n","PennPed00060.jp
g,505,351,adult,273,81,389,327\r\n","PennPed00060.jpg,505,351,adult,378,31,476,330\r\n","PennPed00061.jpg,314,320,adult,34,17,143,307\r\n","PennPed00061.jpg,314,320,adult,250,57,275,146\r\n","PennPed00062.jpg,396,315,adult,86,15,184,301\r\n","PennPed00062.jpg,396,315,adult,176,35,288,305\r\n","PennPed00062.jpg,396,315,adult,2,46,57,223\r\n","PennPed00062.jpg,396,315,adult,321,54,373,234\r\n","PennPed00063.jpg,427,350,adult,88,50,262,339\r\n","PennPed00064.jpg,370,322,adult,226,29,334,314\r\n","PennPed00065.jpg,324,318,adult,24,21,139,304\r\n","PennPed00066.jpg,428,375,adult,11,76,128,365\r\n","PennPed00066.jpg,428,375,adult,265,33,398,312\r\n","PennPed00066.jpg,428,375,adult,76,30,189,345\r\n","PennPed00067.jpg,370,318,adult,37,13,184,298\r\n","PennPed00068.jpg,603,387,adult,43,89,153,364\r\n","PennPed00068.jpg,603,387,adult,189,56,283,349\r\n","PennPed00068.jpg,603,387,adult,430,37,568,349\r\n","PennPed00068.jpg,603,387,adult,384,47,455,223\r\n","PennPed00069.jpg,388,345,adult,59,44,149,330\r\n","PennPed00069.jpg,388,345,adult,169,40,269,310\r\n","PennPed00070.jpg,466,354,adult,15,24,122,341\r\n","PennPed00070.jpg,466,354,adult,185,14,306,312\r\n","PennPed00071.jpg,767,379,adult,49,19,171,361\r\n","PennPed00071.jpg,767,379,adult,292,44,386,305\r\n","PennPed00071.jpg,767,379,adult,476,25,567,302\r\n","PennPed00071.jpg,767,379,adult,600,13,716,306\r\n","PennPed00071.jpg,767,379,adult,567,53,612,180\r\n","PennPed00071.jpg,767,379,adult,709,53,744,174\r\n","PennPed00072.jpg,473,348,adult,134,25,254,337\r\n","PennPed00072.jpg,473,348,adult,280,52,431,330\r\n","PennPed00073.jpg,579,364,adult,98,38,191,326\r\n","PennPed00073.jpg,579,364,adult,182,46,277,340\r\n","PennPed00073.jpg,579,364,adult,316,51,413,331\r\n","PennPed00074.jpg,442,332,adult,259,31,422,317\r\n","PennPed00074.jpg,442,332,adult,198,23,283,303\r\n","PennPed00074.jpg,442,332,adult,117,48,223,295\r\n","PennPed00076.jpg,342,348,adult,187,29,320,319\r\n","PennPed00075.jpg,368,322,adult,194,23,335,308\r\n","Pe
nnPed00077.jpg,352,323,adult,65,27,181,317\r\n","PennPed00078.jpg,349,372,adult,216,59,320,355\r\n","PennPed00079.jpg,357,337,adult,206,25,324,320\r\n","PennPed00080.jpg,435,361,adult,72,47,175,339\r\n","PennPed00080.jpg,435,361,adult,198,43,314,339\r\n","PennPed00080.jpg,435,361,adult,320,44,422,332\r\n","PennPed00081.jpg,561,387,adult,148,85,240,366\r\n","PennPed00081.jpg,561,387,adult,233,54,337,360\r\n","PennPed00081.jpg,561,387,adult,330,75,455,366\r\n","PennPed00082.jpg,398,321,adult,80,22,229,312\r\n","PennPed00083.jpg,371,341,adult,108,32,249,327\r\n","PennPed00083.jpg,371,341,adult,38,22,124,299\r\n","PennPed00084.jpg,370,356,adult,142,46,310,342\r\n","PennPed00084.jpg,370,356,adult,274,21,362,334\r\n","PennPed00084.jpg,370,356,adult,161,95,208,258\r\n","PennPed00085.jpg,403,341,adult,89,27,200,302\r\n","PennPed00085.jpg,403,341,adult,245,38,368,329\r\n","PennPed00086.jpg,474,354,adult,8,33,109,253\r\n","PennPed00086.jpg,474,354,adult,208,49,306,318\r\n","PennPed00086.jpg,474,354,adult,345,32,457,325\r\n","PennPed00086.jpg,474,354,adult,73,163,180,353\r\n","PennPed00086.jpg,474,354,adult,113,65,164,197\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"JthBVzlUO8cz","colab_type":"text"},"source":["# 3단계. 
# ── Model selection & transfer-learning configuration ──────────────────────
# Fixes vs. the original cells:
#   * label_map_pbtxt_fname (and the record paths) are defined BEFORE first
#     use — the original read label_map_pbtxt_fname in the pipeline-edit cell
#     but only assigned it two cells later, which breaks Restart & Run All.
#   * the pipeline config is read fully, substituted, then written once; the
#     original opened it with 'w' (truncating it) before the substitutions
#     ran, so any exception mid-block destroyed the config file.
#   * tarfile is handled with a context manager; shell-escaped duplicate
#     path variables ('My\ Drive' vs 'My Drive') are no longer needed because
#     !cat / !ls are replaced with pure-Python reads.

import os
import re
import shutil
import tarfile
import urllib.request

# Candidate models from the TF1 object-detection model zoo.
MODELS_CONFIG = {
    'ssd_mobilenet_v2_quantized': {
        'model_name': 'ssd_mobilenet_v2_quantized_300x300_coco_2019_01_03',
        'pipeline_file': 'ssd_mobilenet_v2_quantized_300x300_coco.config',
        'batch_size': 12,
    },
}

selected_model = 'ssd_mobilenet_v2_quantized'
MODEL = MODELS_CONFIG[selected_model]['model_name']          # zoo archive name
pipeline_file = MODELS_CONFIG[selected_model]['pipeline_file']  # sample config
batch_size = MODELS_CONFIG[selected_model]['batch_size']

num_steps = 1000       # training steps written into the config (raised later for the real run)
num_eval_steps = 50
quant_delay = 4500     # step at which quantization-aware training starts

RESEARCH_DIR = '/content/gdrive/My Drive/capstone/models/research'
DEST_DIR = os.path.join(RESEARCH_DIR, 'pretrained_model')

# ── Download & unpack the pretrained checkpoint ────────────────────────────
os.chdir(RESEARCH_DIR)

MODEL_FILE = MODEL + '.tar.gz'
DOWNLOAD_BASE = 'http://download.tensorflow.org/models/object_detection/'

if not os.path.exists(MODEL_FILE):
    urllib.request.urlretrieve(DOWNLOAD_BASE + MODEL_FILE, MODEL_FILE)

with tarfile.open(MODEL_FILE) as tar:   # context manager guarantees close
    tar.extractall()
os.remove(MODEL_FILE)

# Replace any stale copy of the pretrained model directory.
if os.path.exists(DEST_DIR):
    shutil.rmtree(DEST_DIR)
os.rename(MODEL, DEST_DIR)

print(sorted(os.listdir(DEST_DIR)))     # sanity check: expect model.ckpt.* files

fine_tune_checkpoint = os.path.join(DEST_DIR, 'model.ckpt')
print(fine_tune_checkpoint)

# ── Paths the pipeline template will point at ──────────────────────────────
# (Values match the generated config shown by the verification cell below;
# the record paths are also produced by the data-preparation step earlier
# in the notebook — re-declared here so this section is self-contained.)
label_map_pbtxt_fname = '/content/gdrive/My Drive/capstone/data/annotations/label_map.pbtxt'
train_record_fname = '/content/gdrive/My Drive/capstone/data/annotations/train.record'
test_record_fname = '/content/gdrive/My Drive/capstone/data/annotations/test.record'

pipeline_fname = os.path.join(
    RESEARCH_DIR, 'object_detection/samples/configs/', pipeline_file)
assert os.path.isfile(pipeline_fname), '`{}` not exist'.format(pipeline_fname)


def get_num_classes(pbtxt_fname):
    """Return the number of classes declared in a label-map .pbtxt file.

    Args:
        pbtxt_fname: path to a TF object-detection label_map.pbtxt.

    Returns:
        int: count of category entries in the label map.
    """
    # Project import: available only after the object-detection API install
    # step at the top of the notebook has run.
    from object_detection.utils import label_map_util
    label_map = label_map_util.load_labelmap(pbtxt_fname)
    categories = label_map_util.convert_label_map_to_categories(
        label_map, max_num_classes=90, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)
    return len(category_index.keys())


# Training artifacts (checkpoints, TensorBoard events) land here.
model_dir = '/content/gdrive/My Drive/capstone/model'
os.makedirs(model_dir, exist_ok=True)

num_classes = get_num_classes(label_map_pbtxt_fname)

# ── Rewrite the sample pipeline config for this dataset ────────────────────
# Read everything first; only write the file back after all substitutions
# have succeeded.
with open(pipeline_fname) as f:
    s = f.read()

# fine_tune_checkpoint: downloaded pre-trained model checkpoint path.
s = re.sub('fine_tune_checkpoint: ".*?"',
           'fine_tune_checkpoint: "{}"'.format(fine_tune_checkpoint), s)

# tfrecord files for the train/test sets created earlier.
s = re.sub('(input_path: ".*?)(train.record)(.*?")',
           'input_path: "{}"'.format(train_record_fname), s)
s = re.sub('(input_path: ".*?)(val.record)(.*?")',
           'input_path: "{}"'.format(test_record_fname), s)

# label_map_path: ID-to-label file.
s = re.sub('label_map_path: ".*?"',
           'label_map_path: "{}"'.format(label_map_pbtxt_fname), s)

s = re.sub('batch_size: [0-9]+', 'batch_size: {}'.format(batch_size), s)
s = re.sub('num_steps: [0-9]+', 'num_steps: {}'.format(num_steps), s)
s = re.sub('num_classes: [0-9]+', 'num_classes: {}'.format(num_classes), s)
s = re.sub('delay: [0-9]+', 'delay: {}'.format(quant_delay), s)

with open(pipeline_fname, 'w') as f:
    f.write(s)

# ── Verify: show the label map and the fully-substituted config ────────────
with open(label_map_pbtxt_fname) as f:
    print(f.read())
with open(pipeline_fname) as f:
    print(f.read())
# ── TensorBoard via an ngrok tunnel ────────────────────────────────────────
# Downloads the ngrok client, starts TensorBoard on :6006 pointed at the
# training directory, opens a public HTTP tunnel, and prints its URL.
# NOTE(review): depends on `model_dir` defined in the pipeline-configuration
# section above — run that section first.

get_ipython().system('wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip')
get_ipython().system('unzip -o ngrok-stable-linux-amd64.zip')

LOG_DIR = model_dir
# system_raw: fire-and-forget background processes (no output capture),
# so the '&' actually leaves them running behind the kernel.
get_ipython().system_raw(
    'tensorboard --logdir "{}" --host 0.0.0.0 --port 6006 &'
    .format(LOG_DIR)
)
get_ipython().system_raw('./ngrok http 6006 &')

# Ask ngrok's local API (port 4040) for the public URL of the tunnel.
get_ipython().system(
    'curl -s http://localhost:4040/api/tunnels | python3 -c '
    '"import sys, json; print(json.load(sys.stdin)[\'tunnels\'][0][\'public_url\'])"'
)

# Pin numpy for the TF1 training run. Use %pip (not !pip) so the install
# targets the kernel's own environment; Colab needs a runtime restart
# afterwards for the downgrade to take effect.
get_ipython().run_line_magic('pip', 'install numpy==1.17')
numpy-1.17.0\n"],"name":"stdout"},{"output_type":"display_data","data":{"application/vnd.colab-display-data+json":{"pip_warning":{"packages":["numpy"]}}},"metadata":{"tags":[]}}]},{"cell_type":"code","metadata":{"id":"QOhMb6clZmY-","colab_type":"code","outputId":"c9778142-7720-4d8d-9850-7a93d3bf49b8","executionInfo":{"status":"ok","timestamp":1592014203310,"user_tz":-540,"elapsed":25227576,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000}},"source":["num_steps = 5000  # num_steps=(#images x epoch)/batch_size \n","num_eval_steps=50\n","\n","!python /content/gdrive/My\\ Drive/capstone/models/research/object_detection/model_main.py \\\n","    --pipeline_config_path='{pipeline_fname}' \\\n","    --model_dir='{model_dir}' \\\n","    --alsologtostderr \\\n","    --num_train_steps={num_steps} \\\n","    --num_eval_steps={num_eval_steps}"],"execution_count":0,"outputs":[{"output_type":"stream","text":["\u001b[1;30;43m스트리밍 출력 내용이 길어서 마지막 5000줄이 삭제되었습니다.\u001b[0m\n","I0612 21:01:21.601303 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 21:01:21.601609 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 21:01:21.601795 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 21:01:21.602059 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 21:01:21.602245 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 21:01:21.602507 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 21:01:21.602680 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 21:01:21.602932 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 21:01:21.603106 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 21:01:21.603359 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 21:01:21.603542 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 21:01:21.603797 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 21:01:21.603984 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 21:01:21.604227 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 21:01:21.604403 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 21:01:21.604662 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 21:01:21.604843 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 21:01:21.605097 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 21:01:21.605260 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 21:01:21.605511 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 21:01:21.605679 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 21:01:21.605935 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 21:01:21.606098 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 21:01:21.606344 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 21:01:21.606515 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 21:01:21.606762 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 21:01:21.606935 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 21:01:21.607180 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 21:01:21.607348 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 21:01:21.607620 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 21:01:21.607792 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 21:01:21.608048 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 21:01:21.608211 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 21:01:21.608471 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 21:01:21.608630 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 21:01:21.608782 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 21:01:21.608942 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 21:01:21.609097 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 21:01:21.609248 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 21:01:21.609398 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 21:01:21.609560 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 21:01:21.609713 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 21:01:22.673808 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T21:01:22Z\n","I0612 21:01:22.692204 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T21:01:22Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 21:01:23.705609 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My 
Drive/capstone/model/model.ckpt-1325\n","I0612 21:01:23.709199 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1325\n","INFO:tensorflow:Running local_init_op.\n","I0612 21:01:25.387523 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 21:01:25.623786 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 21:01:37.249731 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 21:01:37.251213 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 21:01:37.256107 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.38s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.563\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.955\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.569\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.568\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.486\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.642\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.650\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ 
IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.654\n","INFO:tensorflow:Finished evaluation at 2020-06-12-21:01:38\n","I0612 21:01:38.576180 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-21:01:38\n","INFO:tensorflow:Saving dict for global step 1325: DetectionBoxes_Precision/mAP = 0.56313306, DetectionBoxes_Precision/mAP (large) = 0.5681387, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9554783, DetectionBoxes_Precision/mAP@.75IOU = 0.5687389, DetectionBoxes_Recall/AR@1 = 0.48628473, DetectionBoxes_Recall/AR@10 = 0.64193374, DetectionBoxes_Recall/AR@100 = 0.64950585, DetectionBoxes_Recall/AR@100 (large) = 0.6544017, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1705973, Loss/localization_loss = 0.583413, Loss/regularization_loss = 0.34874994, Loss/total_loss = 4.1027603, global_step = 1325, learning_rate = 0.004, loss = 4.1027603\n","I0612 21:01:38.576510 140539975640960 estimator.py:2049] Saving dict for global step 1325: DetectionBoxes_Precision/mAP = 0.56313306, DetectionBoxes_Precision/mAP (large) = 0.5681387, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9554783, DetectionBoxes_Precision/mAP@.75IOU = 0.5687389, DetectionBoxes_Recall/AR@1 = 0.48628473, DetectionBoxes_Recall/AR@10 = 0.64193374, DetectionBoxes_Recall/AR@100 = 0.64950585, DetectionBoxes_Recall/AR@100 (large) = 0.6544017, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1705973, Loss/localization_loss = 0.583413, Loss/regularization_loss = 0.34874994, Loss/total_loss = 4.1027603, global_step = 1325, learning_rate = 0.004, loss = 4.1027603\n","INFO:tensorflow:Saving 'checkpoint_path' 
summary for global step 1325: /content/gdrive/My Drive/capstone/model/model.ckpt-1325\n","I0612 21:01:38.585641 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 1325: /content/gdrive/My Drive/capstone/model/model.ckpt-1325\n","INFO:tensorflow:global_step/sec: 0.195903\n","I0612 21:07:48.914821 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.195903\n","INFO:tensorflow:loss = 2.160614, step = 1401 (510.458 sec)\n","I0612 21:07:48.917670 140539975640960 basic_session_run_hooks.py:260] loss = 2.160614, step = 1401 (510.458 sec)\n","INFO:tensorflow:Saving checkpoints for 1444 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 21:11:14.364614 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 1444 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171bb5710>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0612 21:11:17.619617 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171bb5710>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171d96730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 21:11:17.814924 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171d96730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 21:11:18.306942 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:11:21.396350 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:11:21.429907 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:11:21.462429 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:11:21.498420 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:11:21.531364 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:11:21.564662 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 21:11:23.996720 
140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 21:11:23.997147 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 21:11:23.997506 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 21:11:23.997741 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 21:11:23.998058 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 21:11:23.998277 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 21:11:23.998599 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 21:11:23.998815 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 21:11:23.999123 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 21:11:23.999332 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 21:11:23.999644 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 21:11:23.999863 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 21:11:24.000192 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 21:11:24.000422 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 21:11:24.000707 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 21:11:24.000890 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 21:11:24.001159 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 21:11:24.001333 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 21:11:24.001615 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 21:11:24.001791 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 21:11:24.002066 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 21:11:24.002239 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 21:11:24.002523 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 21:11:24.002701 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 21:11:24.002993 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 21:11:24.003220 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 21:11:24.003554 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 21:11:24.003769 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 21:11:24.004085 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 21:11:24.004295 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 21:11:24.004642 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 21:11:24.004864 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 21:11:24.005166 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 21:11:24.005376 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 21:11:24.005692 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 21:11:24.005905 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 21:11:24.006112 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 21:11:24.006314 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 21:11:24.006541 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 21:11:24.006751 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 21:11:24.006958 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 21:11:24.007161 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 21:11:24.007362 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 21:11:25.229662 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T21:11:25Z\n","I0612 21:11:25.250115 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T21:11:25Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 21:11:26.063925 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1444\n","I0612 21:11:26.067548 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1444\n","INFO:tensorflow:Running local_init_op.\n","I0612 21:11:27.927103 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 21:11:28.170564 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 21:11:40.280860 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 21:11:40.281661 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 21:11:40.286649 140537672500992 coco_tools.py:137] DONE 
(t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.56s).\n","Accumulating evaluation results...\n","DONE (t=0.11s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.555\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.929\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.574\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.057\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.563\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.472\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.630\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.631\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.634\n","INFO:tensorflow:Finished evaluation at 2020-06-12-21:11:41\n","I0612 21:11:41.581679 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-21:11:41\n","INFO:tensorflow:Saving dict for global step 1444: DetectionBoxes_Precision/mAP = 0.5552453, DetectionBoxes_Precision/mAP (large) = 0.56258225, DetectionBoxes_Precision/mAP (medium) = 0.057142857, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.92911965, DetectionBoxes_Precision/mAP@.75IOU = 0.57398397, DetectionBoxes_Recall/AR@1 = 0.472289, DetectionBoxes_Recall/AR@10 = 0.63019496, DetectionBoxes_Recall/AR@100 = 0.6311565, DetectionBoxes_Recall/AR@100 (large) = 0.6338649, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, 
Loss/classification_loss = 3.2758803, Loss/localization_loss = 0.558315, Loss/regularization_loss = 0.3489412, Loss/total_loss = 4.183137, global_step = 1444, learning_rate = 0.004, loss = 4.183137\n","I0612 21:11:41.581993 140539975640960 estimator.py:2049] Saving dict for global step 1444: DetectionBoxes_Precision/mAP = 0.5552453, DetectionBoxes_Precision/mAP (large) = 0.56258225, DetectionBoxes_Precision/mAP (medium) = 0.057142857, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.92911965, DetectionBoxes_Precision/mAP@.75IOU = 0.57398397, DetectionBoxes_Recall/AR@1 = 0.472289, DetectionBoxes_Recall/AR@10 = 0.63019496, DetectionBoxes_Recall/AR@100 = 0.6311565, DetectionBoxes_Recall/AR@100 (large) = 0.6338649, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.2758803, Loss/localization_loss = 0.558315, Loss/regularization_loss = 0.3489412, Loss/total_loss = 4.183137, global_step = 1444, learning_rate = 0.004, loss = 4.183137\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 1444: /content/gdrive/My Drive/capstone/model/model.ckpt-1444\n","I0612 21:11:41.590274 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 1444: /content/gdrive/My Drive/capstone/model/model.ckpt-1444\n","INFO:tensorflow:global_step/sec: 0.194734\n","I0612 21:16:22.435748 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.194734\n","INFO:tensorflow:loss = 2.218592, step = 1501 (513.520 sec)\n","I0612 21:16:22.437608 140539975640960 basic_session_run_hooks.py:260] loss = 2.218592, step = 1501 (513.520 sec)\n","INFO:tensorflow:Saving checkpoints for 1562 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 21:21:17.084882 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 1562 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method 
TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd172629550>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0612 21:21:20.096627 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd172629550>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170f63620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 21:21:20.284224 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170f63620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 21:21:20.749027 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:21:23.069088 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:21:23.103957 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:21:23.138090 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:21:23.172523 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:21:23.207968 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:21:23.241013 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 21:21:25.661397 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 21:21:25.661841 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 21:21:25.662135 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 21:21:25.662316 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 21:21:25.662595 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 21:21:25.662771 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 21:21:25.663031 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 21:21:25.663210 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 21:21:25.663476 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 21:21:25.663650 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 21:21:25.663909 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 21:21:25.664074 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 21:21:25.664337 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 21:21:25.664525 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 21:21:25.664783 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 21:21:25.665026 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 21:21:25.665305 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 21:21:25.665491 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 21:21:25.665753 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 21:21:25.665920 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 21:21:25.666210 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 21:21:25.666385 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 21:21:25.666658 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 21:21:25.666825 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 21:21:25.667076 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 21:21:25.667253 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 21:21:25.667526 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 21:21:25.667698 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 21:21:25.667955 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 21:21:25.668123 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 21:21:25.668380 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 21:21:25.668555 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 21:21:25.668809 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 21:21:25.668976 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 21:21:25.669240 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 21:21:25.669399 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 21:21:25.669573 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 21:21:25.669728 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 21:21:25.669883 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 21:21:25.670042 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 21:21:25.670198 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 21:21:25.670353 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 21:21:25.670524 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 21:21:27.438916 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T21:21:27Z\n","I0612 21:21:27.458157 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T21:21:27Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 21:21:28.282315 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1562\n","I0612 21:21:28.286563 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1562\n","INFO:tensorflow:Running local_init_op.\n","I0612 21:21:30.095653 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 21:21:30.347435 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 21:21:42.326955 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 21:21:42.327517 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 21:21:42.332114 140537680893696 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.52s).\n","Accumulating evaluation results...\n","DONE (t=0.12s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.579\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.946\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.676\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.233\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.585\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.481\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.643\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.644\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.649\n","INFO:tensorflow:Finished evaluation at 2020-06-12-21:21:43\n","I0612 21:21:43.719947 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-21:21:43\n","INFO:tensorflow:Saving dict for global step 1562: DetectionBoxes_Precision/mAP = 0.5794748, DetectionBoxes_Precision/mAP (large) = 0.5846723, DetectionBoxes_Precision/mAP (medium) = 0.23333333, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.945898, DetectionBoxes_Precision/mAP@.75IOU = 0.6758083, DetectionBoxes_Recall/AR@1 = 0.48072916, DetectionBoxes_Recall/AR@10 = 0.6428686, DetectionBoxes_Recall/AR@100 = 0.6438301, DetectionBoxes_Recall/AR@100 (large) = 0.6485176, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1727316, Loss/localization_loss = 0.61367613, Loss/regularization_loss = 0.34906787, Loss/total_loss = 4.135477, global_step = 1562, learning_rate = 0.004, loss = 4.135477\n","I0612 21:21:43.720272 140539975640960 estimator.py:2049] Saving dict for global step 1562: DetectionBoxes_Precision/mAP = 0.5794748, DetectionBoxes_Precision/mAP (large) = 0.5846723, DetectionBoxes_Precision/mAP (medium) = 0.23333333, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.945898, DetectionBoxes_Precision/mAP@.75IOU = 0.6758083, DetectionBoxes_Recall/AR@1 = 0.48072916, DetectionBoxes_Recall/AR@10 = 0.6428686, DetectionBoxes_Recall/AR@100 = 0.6438301, DetectionBoxes_Recall/AR@100 (large) = 0.6485176, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1727316, Loss/localization_loss = 0.61367613, Loss/regularization_loss = 0.34906787, Loss/total_loss = 4.135477, global_step = 1562, learning_rate = 0.004, loss = 4.135477\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 1562: /content/gdrive/My Drive/capstone/model/model.ckpt-1562\n","I0612 21:21:43.729093 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 1562: /content/gdrive/My Drive/capstone/model/model.ckpt-1562\n","INFO:tensorflow:global_step/sec: 0.194226\n","I0612 21:24:57.299294 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.194226\n","INFO:tensorflow:loss = 1.7113242, step = 1601 (514.863 sec)\n","I0612 21:24:57.300973 140539975640960 basic_session_run_hooks.py:260] loss = 1.7113242, step = 1601 (514.863 sec)\n","INFO:tensorflow:Saving checkpoints for 1680 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 21:31:21.031147 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 1680 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171f68a58>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 21:31:23.972634 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171f68a58>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16e33b620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 21:31:24.152610 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16e33b620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 21:31:24.619543 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:31:26.957351 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:31:26.995867 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:31:27.034506 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:31:27.072865 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:31:27.113970 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:31:27.159310 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 21:31:29.601917 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 21:31:29.602282 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 21:31:29.602615 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 21:31:29.602831 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 21:31:29.603124 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 21:31:29.603331 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 21:31:29.603626 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 21:31:29.603828 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 21:31:29.604113 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 21:31:29.604312 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 21:31:29.604601 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 21:31:29.604819 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 21:31:29.605105 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 21:31:29.605302 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 21:31:29.605585 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 21:31:29.605780 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 21:31:29.606059 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 21:31:29.606257 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 21:31:29.606543 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 21:31:29.606741 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 21:31:29.607022 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 21:31:29.607220 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 21:31:29.607509 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 21:31:29.607706 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 21:31:29.607987 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 21:31:29.608187 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 21:31:29.608478 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 21:31:29.608671 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 21:31:29.608951 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 21:31:29.609151 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 21:31:29.609424 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 21:31:29.609642 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 21:31:29.609923 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 21:31:29.610122 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 21:31:29.610402 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 21:31:29.610604 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 21:31:29.610790 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 21:31:29.610980 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 21:31:29.611181 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 21:31:29.611372 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 21:31:29.611583 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 21:31:29.611780 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 21:31:29.611971 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 21:31:30.630275 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T21:31:30Z\n","I0612 21:31:30.648602 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T21:31:30Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 21:31:31.483436 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1680\n","I0612 21:31:31.487168 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1680\n","INFO:tensorflow:Running local_init_op.\n","I0612 21:31:33.120167 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 21:31:33.342839 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 21:31:45.032826 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 21:31:45.033392 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0612 21:31:45.039353 140537697679104 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.37s).\n","Accumulating evaluation results...\n","DONE (t=0.13s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.574\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.918\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.700\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.205\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.581\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.484\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.659\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.664\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.669\n","INFO:tensorflow:Finished evaluation at 2020-06-12-21:31:46\n","I0612 21:31:46.381055 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-21:31:46\n","INFO:tensorflow:Saving dict for global step 1680: DetectionBoxes_Precision/mAP = 0.574376, DetectionBoxes_Precision/mAP (large) = 0.5813416, DetectionBoxes_Precision/mAP (medium) = 0.20476191, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9176189, DetectionBoxes_Precision/mAP@.75IOU = 0.69982666, DetectionBoxes_Recall/AR@1 = 0.4837874, DetectionBoxes_Recall/AR@10 = 0.659148, DetectionBoxes_Recall/AR@100 = 0.66383547, DetectionBoxes_Recall/AR@100 (large) = 0.6690438, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.3515563, Loss/localization_loss = 0.5674439, Loss/regularization_loss = 0.34920904, Loss/total_loss = 4.2682085, global_step = 1680, learning_rate = 0.004, loss = 4.2682085\n","I0612 21:31:46.381361 140539975640960 estimator.py:2049] Saving dict for global step 1680: DetectionBoxes_Precision/mAP = 0.574376, DetectionBoxes_Precision/mAP (large) = 0.5813416, DetectionBoxes_Precision/mAP (medium) = 0.20476191, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9176189, DetectionBoxes_Precision/mAP@.75IOU = 0.69982666, DetectionBoxes_Recall/AR@1 = 0.4837874, DetectionBoxes_Recall/AR@10 = 0.659148, DetectionBoxes_Recall/AR@100 = 0.66383547, DetectionBoxes_Recall/AR@100 (large) = 0.6690438, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.3515563, Loss/localization_loss = 0.5674439, Loss/regularization_loss = 0.34920904, Loss/total_loss = 4.2682085, global_step = 1680, learning_rate = 0.004, loss = 4.2682085\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 1680: /content/gdrive/My Drive/capstone/model/model.ckpt-1680\n","I0612 21:31:46.389819 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 1680: /content/gdrive/My Drive/capstone/model/model.ckpt-1680\n","INFO:tensorflow:global_step/sec: 0.19631\n","I0612 21:33:26.696612 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.19631\n","INFO:tensorflow:loss = 2.7009773, step = 1701 (509.397 sec)\n","I0612 21:33:26.698252 140539975640960 basic_session_run_hooks.py:260] loss = 2.7009773, step = 1701 (509.397 sec)\n","INFO:tensorflow:Saving checkpoints for 1801 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 21:41:24.169529 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 1801 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16f017518>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 21:41:27.201091 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16f017518>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16fcb8f28> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 21:41:27.386774 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16fcb8f28> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 21:41:27.858363 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:41:30.186001 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:41:30.222123 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:41:30.256914 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:41:30.294716 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:41:30.329493 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:41:30.363566 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 21:41:33.310400 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 21:41:33.310818 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 21:41:33.311147 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 21:41:33.311368 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 21:41:33.311696 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 21:41:33.311915 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 21:41:33.312232 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 21:41:33.312465 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 21:41:33.312787 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 21:41:33.312997 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 21:41:33.313301 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 21:41:33.313543 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 21:41:33.313858 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 21:41:33.314070 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 21:41:33.314365 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 21:41:33.314582 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 21:41:33.314879 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 21:41:33.315088 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 21:41:33.315388 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 21:41:33.315673 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 21:41:33.316006 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 21:41:33.316224 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 21:41:33.316573 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 21:41:33.316802 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 21:41:33.317111 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 21:41:33.317320 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 21:41:33.317655 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 21:41:33.317868 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 21:41:33.318168 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 21:41:33.318372 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 21:41:33.318678 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 21:41:33.318885 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 21:41:33.319184 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 21:41:33.319409 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 21:41:33.319745 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 21:41:33.319944 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 21:41:33.320144 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 21:41:33.320339 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 21:41:33.320563 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 21:41:33.320765 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 21:41:33.320961 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 21:41:33.321166 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 21:41:33.321367 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 21:41:34.414321 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T21:41:34Z\n","I0612 21:41:34.437359 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T21:41:34Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 21:41:35.381862 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1801\n","I0612 21:41:35.385353 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1801\n","INFO:tensorflow:Running local_init_op.\n","I0612 21:41:37.041786 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 21:41:37.265890 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 21:41:48.847549 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 21:41:48.848156 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 21:41:48.853368 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.34s).\n","Accumulating evaluation results...\n","DONE (t=0.14s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.593\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.963\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.698\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.250\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.597\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.492\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.651\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.653\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.656\n","INFO:tensorflow:Finished evaluation at 2020-06-12-21:41:50\n","I0612 21:41:50.132371 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-21:41:50\n","INFO:tensorflow:Saving dict for global step 1801: DetectionBoxes_Precision/mAP = 0.59272206, DetectionBoxes_Precision/mAP (large) = 0.5968228, DetectionBoxes_Precision/mAP (medium) = 0.25, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9627347, DetectionBoxes_Precision/mAP@.75IOU = 0.6978526, DetectionBoxes_Recall/AR@1 = 0.49154648, DetectionBoxes_Recall/AR@10 = 0.6506811, DetectionBoxes_Recall/AR@100 = 0.65260416, DetectionBoxes_Recall/AR@100 (large) = 0.6557292, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.0487247, Loss/localization_loss = 0.5109903, Loss/regularization_loss = 0.34933966, Loss/total_loss = 3.9090562, global_step = 1801, learning_rate = 0.004, loss = 3.9090562\n","I0612 21:41:50.132717 140539975640960 estimator.py:2049] Saving dict for global step 1801: DetectionBoxes_Precision/mAP = 0.59272206, DetectionBoxes_Precision/mAP (large) = 0.5968228, DetectionBoxes_Precision/mAP (medium) = 0.25, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9627347, DetectionBoxes_Precision/mAP@.75IOU = 0.6978526, DetectionBoxes_Recall/AR@1 = 0.49154648, DetectionBoxes_Recall/AR@10 = 0.6506811, DetectionBoxes_Recall/AR@100 = 0.65260416, DetectionBoxes_Recall/AR@100 (large) = 0.6557292, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.0487247, Loss/localization_loss = 0.5109903, Loss/regularization_loss = 0.34933966, Loss/total_loss = 3.9090562, global_step = 1801, learning_rate = 0.004, loss = 3.9090562\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 1801: /content/gdrive/My Drive/capstone/model/model.ckpt-1801\n","I0612 21:41:50.141212 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 1801: /content/gdrive/My Drive/capstone/model/model.ckpt-1801\n","INFO:tensorflow:global_step/sec: 0.198631\n","I0612 21:41:50.143523 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.198631\n","INFO:tensorflow:loss = 3.730486, step = 1801 (503.446 sec)\n","I0612 21:41:50.144377 140539975640960 basic_session_run_hooks.py:260] loss = 3.730486, step = 1801 (503.446 sec)\n","INFO:tensorflow:global_step/sec: 0.211283\n","I0612 21:49:43.443166 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.211283\n","INFO:tensorflow:loss = 2.2319918, step = 1901 (473.301 sec)\n","I0612 21:49:43.444982 140539975640960 basic_session_run_hooks.py:260] loss = 2.2319918, step = 1901 (473.301 sec)\n","INFO:tensorflow:Saving checkpoints for 1923 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 21:51:26.990018 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 1923 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1714100b8>> could not be transformed and will be executed 
as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0612 21:51:30.036499 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1714100b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17498c510> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 21:51:30.225400 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17498c510> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 21:51:30.679729 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:51:32.917886 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:51:32.951173 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:51:32.982700 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:51:33.014737 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:51:33.045869 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 21:51:33.077036 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 21:51:35.409569 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 21:51:35.409902 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 21:51:35.410173 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 21:51:35.410342 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 21:51:35.410603 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 21:51:35.410805 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 21:51:35.411046 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 21:51:35.411215 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 21:51:35.411470 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 21:51:35.411638 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 21:51:35.411877 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 21:51:35.412038 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 21:51:35.412273 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 21:51:35.412457 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 21:51:35.412697 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 21:51:35.412853 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 21:51:35.413087 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 21:51:35.413267 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 21:51:35.413518 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 21:51:35.413695 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 21:51:35.413924 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 21:51:35.414076 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 21:51:35.414304 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 21:51:35.414487 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 21:51:35.414728 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 21:51:35.414913 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 21:51:35.415145 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 21:51:35.415313 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 21:51:35.415569 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 21:51:35.415732 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 21:51:35.415965 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 21:51:35.416120 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 21:51:35.416422 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 21:51:35.416652 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 21:51:35.416893 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 21:51:35.417057 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 21:51:35.417205 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 21:51:35.417350 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 21:51:35.417517 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 21:51:35.417667 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 21:51:35.417810 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 21:51:35.417957 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 21:51:35.418104 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 21:51:36.430928 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T21:51:36Z\n","I0612 21:51:36.449944 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T21:51:36Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 21:51:37.367717 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1923\n","I0612 21:51:37.371343 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-1923\n","INFO:tensorflow:Running local_init_op.\n","I0612 21:51:39.033095 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 21:51:39.249505 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 21:51:50.598560 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 21:51:50.599138 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 21:51:50.604070 140537680893696 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.39s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.597\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.960\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.680\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.117\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.603\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.487\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.662\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.663\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.200\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.669\n","INFO:tensorflow:Finished evaluation at 2020-06-12-21:51:51\n","I0612 21:51:51.975030 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-21:51:51\n","INFO:tensorflow:Saving dict for global step 1923: DetectionBoxes_Precision/mAP = 0.5965719, DetectionBoxes_Precision/mAP (large) = 0.60256726, DetectionBoxes_Precision/mAP (medium) = 0.11666667, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.95963264, DetectionBoxes_Precision/mAP@.75IOU = 0.6800178, DetectionBoxes_Recall/AR@1 = 0.48731303, DetectionBoxes_Recall/AR@10 = 0.66160524, DetectionBoxes_Recall/AR@100 = 0.6631677, DetectionBoxes_Recall/AR@100 (large) = 0.6688969, DetectionBoxes_Recall/AR@100 (medium) = 0.2, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1900945, Loss/localization_loss = 0.5719563, Loss/regularization_loss = 0.34949243, Loss/total_loss = 4.1115437, global_step = 1923, learning_rate = 0.004, loss = 4.1115437\n","I0612 21:51:51.975317 140539975640960 estimator.py:2049] Saving dict for global step 1923: DetectionBoxes_Precision/mAP = 0.5965719, DetectionBoxes_Precision/mAP (large) = 0.60256726, DetectionBoxes_Precision/mAP (medium) = 0.11666667, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.95963264, DetectionBoxes_Precision/mAP@.75IOU = 0.6800178, DetectionBoxes_Recall/AR@1 = 0.48731303, DetectionBoxes_Recall/AR@10 = 0.66160524, DetectionBoxes_Recall/AR@100 = 0.6631677, DetectionBoxes_Recall/AR@100 (large) = 0.6688969, DetectionBoxes_Recall/AR@100 (medium) = 0.2, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1900945, Loss/localization_loss = 0.5719563, Loss/regularization_loss = 0.34949243, Loss/total_loss = 4.1115437, global_step = 1923, learning_rate = 0.004, loss = 4.1115437\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 1923: /content/gdrive/My Drive/capstone/model/model.ckpt-1923\n","I0612 21:51:51.983968 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 1923: /content/gdrive/My Drive/capstone/model/model.ckpt-1923\n","INFO:tensorflow:global_step/sec: 0.200741\n","I0612 21:58:01.597262 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.200741\n","INFO:tensorflow:loss = 2.2270641, step = 2001 (498.154 sec)\n","I0612 21:58:01.598956 140539975640960 basic_session_run_hooks.py:260] loss = 2.2270641, step = 2001 (498.154 sec)\n","INFO:tensorflow:Saving checkpoints for 2044 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 22:01:29.399909 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2044 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1724968d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 22:01:32.388284 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1724968d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd175327b70> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 22:01:32.561052 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd175327b70> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 22:01:33.023119 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:01:35.907524 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:01:35.940498 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:01:35.972111 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:01:36.005286 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:01:36.036671 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:01:36.068057 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 22:01:38.474891 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 22:01:38.475220 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 22:01:38.475498 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 22:01:38.475693 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 22:01:38.475946 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 22:01:38.476123 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 22:01:38.476362 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 22:01:38.476539 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 22:01:38.476788 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 22:01:38.476961 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 22:01:38.477198 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 22:01:38.477358 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 22:01:38.477602 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 22:01:38.477760 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 22:01:38.478000 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 22:01:38.478155 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 22:01:38.478389 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 22:01:38.478567 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 22:01:38.478805 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 22:01:38.478980 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 22:01:38.479222 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 22:01:38.479392 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 22:01:38.479648 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 22:01:38.479812 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 22:01:38.480056 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 22:01:38.480215 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 22:01:38.480465 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 22:01:38.480629 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 22:01:38.480876 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 22:01:38.481038 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 22:01:38.481276 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 22:01:38.481435 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 22:01:38.481688 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 22:01:38.481850 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 22:01:38.482095 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 22:01:38.482247 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 22:01:38.482395 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 22:01:38.482568 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 22:01:38.482722 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 22:01:38.482877 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 22:01:38.483029 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 22:01:38.483177 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 22:01:38.483326 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 22:01:39.537734 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T22:01:39Z\n","I0612 22:01:39.561625 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T22:01:39Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 22:01:40.531731 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2044\n","I0612 22:01:40.535388 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2044\n","INFO:tensorflow:Running local_init_op.\n","I0612 22:01:42.112812 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 22:01:42.336079 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 22:01:53.697215 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 22:01:53.697901 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 22:01:53.702878 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.59s).\n","Accumulating evaluation results...\n","DONE (t=0.09s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.561\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.928\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.644\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.211\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.568\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.491\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.635\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.646\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.651\n","INFO:tensorflow:Finished evaluation at 2020-06-12-22:01:54\n","I0612 22:01:55.000071 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-22:01:54\n","INFO:tensorflow:Saving dict for global step 2044: DetectionBoxes_Precision/mAP = 0.56114334, DetectionBoxes_Precision/mAP (large) = 0.5678903, DetectionBoxes_Precision/mAP (medium) = 0.21111111, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.927914, DetectionBoxes_Precision/mAP@.75IOU = 0.6440702, DetectionBoxes_Recall/AR@1 = 0.49102563, DetectionBoxes_Recall/AR@10 = 0.63537663, DetectionBoxes_Recall/AR@100 = 0.64595354, DetectionBoxes_Recall/AR@100 (large) = 0.65095353, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.4879117, Loss/localization_loss = 0.54213786, Loss/regularization_loss = 0.34960878, Loss/total_loss = 4.3796577, global_step = 2044, learning_rate = 0.004, loss = 4.3796577\n","I0612 22:01:55.000405 140539975640960 estimator.py:2049] Saving dict for global step 2044: DetectionBoxes_Precision/mAP = 0.56114334, DetectionBoxes_Precision/mAP (large) = 0.5678903, DetectionBoxes_Precision/mAP (medium) = 0.21111111, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.927914, DetectionBoxes_Precision/mAP@.75IOU = 0.6440702, DetectionBoxes_Recall/AR@1 = 0.49102563, DetectionBoxes_Recall/AR@10 = 0.63537663, DetectionBoxes_Recall/AR@100 = 0.64595354, DetectionBoxes_Recall/AR@100 (large) = 0.65095353, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.4879117, Loss/localization_loss = 0.54213786, Loss/regularization_loss = 0.34960878, Loss/total_loss = 4.3796577, global_step = 2044, learning_rate = 0.004, loss = 4.3796577\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2044: /content/gdrive/My Drive/capstone/model/model.ckpt-2044\n","I0612 22:01:55.008464 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2044: /content/gdrive/My Drive/capstone/model/model.ckpt-2044\n","INFO:tensorflow:global_step/sec: 0.197959\n","I0612 22:06:26.752137 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.197959\n","INFO:tensorflow:loss = 2.8756194, step = 2101 (505.155 sec)\n","I0612 22:06:26.753607 140539975640960 basic_session_run_hooks.py:260] loss = 2.8756194, step = 2101 (505.155 sec)\n","INFO:tensorflow:Saving checkpoints for 2164 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 22:11:33.897041 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2164 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171779fd0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 22:11:37.273659 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171779fd0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17178a510> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 22:11:37.456854 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17178a510> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 22:11:37.931281 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:11:40.227135 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:11:40.261412 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:11:40.294039 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:11:40.327068 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:11:40.361019 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:11:40.392802 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 22:11:42.752401 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 22:11:42.752760 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 22:11:42.753027 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 22:11:42.753197 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 22:11:42.753465 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 22:11:42.753641 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 22:11:42.753885 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 22:11:42.754064 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 22:11:42.754304 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 22:11:42.754485 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 22:11:42.754729 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 22:11:42.754900 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 22:11:42.755139 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 22:11:42.755299 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 22:11:42.755571 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 22:11:42.755748 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 22:11:42.755990 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 22:11:42.756153 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 22:11:42.756396 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 22:11:42.756573 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 22:11:42.756817 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 22:11:42.756985 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 22:11:42.757226 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 22:11:42.757421 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 22:11:42.757676 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 22:11:42.757842 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 22:11:42.758084 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 22:11:42.758242 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 22:11:42.758503 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 22:11:42.758669 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 22:11:42.758914 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 22:11:42.759091 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 22:11:42.759332 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 22:11:42.759507 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 22:11:42.759755 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 22:11:42.759909 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 22:11:42.760061 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 22:11:42.760211 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 22:11:42.760362 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 22:11:42.760529 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 22:11:42.760681 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 22:11:42.760830 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 22:11:42.760979 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 22:11:43.775490 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T22:11:43Z\n","I0612 22:11:43.793235 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T22:11:43Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 22:11:44.812295 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2164\n","I0612 22:11:44.816045 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2164\n","INFO:tensorflow:Running local_init_op.\n","I0612 22:11:46.447003 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 22:11:46.672355 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 22:11:58.819652 140537689286400 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 22:11:58.820588 140537689286400 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 22:11:58.825473 140537689286400 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.38s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.603\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.952\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.710\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.609\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.510\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.681\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.692\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.698\n","INFO:tensorflow:Finished evaluation at 2020-06-12-22:12:00\n","I0612 22:12:00.152858 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-22:12:00\n","INFO:tensorflow:Saving dict for global step 2164: DetectionBoxes_Precision/mAP = 0.60342187, DetectionBoxes_Precision/mAP (large) = 0.6093835, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.95196486, DetectionBoxes_Precision/mAP@.75IOU = 0.70993763, DetectionBoxes_Recall/AR@1 = 0.51032317, DetectionBoxes_Recall/AR@10 = 0.68135685, DetectionBoxes_Recall/AR@100 = 0.69193375, DetectionBoxes_Recall/AR@100 (large) = 0.6977671, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.3915293, Loss/localization_loss = 0.4866264, Loss/regularization_loss = 0.3497799, Loss/total_loss = 4.227935, global_step = 2164, learning_rate = 0.004, loss = 4.227935\n","I0612 22:12:00.153172 140539975640960 estimator.py:2049] Saving dict for global step 2164: DetectionBoxes_Precision/mAP = 0.60342187, DetectionBoxes_Precision/mAP (large) = 0.6093835, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.95196486, DetectionBoxes_Precision/mAP@.75IOU = 0.70993763, DetectionBoxes_Recall/AR@1 = 0.51032317, DetectionBoxes_Recall/AR@10 = 0.68135685, DetectionBoxes_Recall/AR@100 = 0.69193375, DetectionBoxes_Recall/AR@100 (large) = 0.6977671, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.3915293, Loss/localization_loss = 0.4866264, Loss/regularization_loss = 0.3497799, Loss/total_loss = 4.227935, global_step = 2164, learning_rate = 0.004, loss = 4.227935\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2164: /content/gdrive/My Drive/capstone/model/model.ckpt-2164\n","I0612 22:12:00.161397 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2164: /content/gdrive/My Drive/capstone/model/model.ckpt-2164\n","INFO:tensorflow:global_step/sec: 0.197086\n","I0612 22:14:54.145278 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.197086\n","INFO:tensorflow:loss = 1.7939185, step = 2201 (507.394 sec)\n","I0612 22:14:54.147245 140539975640960 basic_session_run_hooks.py:260] loss = 1.7939185, step = 2201 (507.394 sec)\n","INFO:tensorflow:Saving checkpoints for 2286 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 22:21:37.071941 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2286 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd178dd61d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 22:21:40.038380 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd178dd61d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17178a158> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 22:21:40.218659 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17178a158> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 22:21:40.710756 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:21:42.987594 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:21:43.019909 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:21:43.052283 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:21:43.084844 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:21:43.117068 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:21:43.151022 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 22:21:45.566707 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 22:21:45.567102 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 22:21:45.567428 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 22:21:45.567675 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 22:21:45.567976 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 22:21:45.568191 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 22:21:45.568498 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 22:21:45.568722 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 22:21:45.569011 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 22:21:45.569216 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 22:21:45.569523 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 22:21:45.569732 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 22:21:45.570019 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 22:21:45.570226 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 22:21:45.570532 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 22:21:45.570744 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 22:21:45.571036 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 22:21:45.571237 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 22:21:45.571541 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 22:21:45.571751 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 22:21:45.572035 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 22:21:45.572234 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 22:21:45.572539 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 22:21:45.572747 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 22:21:45.573032 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 22:21:45.573230 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 22:21:45.573535 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 22:21:45.573742 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 22:21:45.574026 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 22:21:45.574225 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 22:21:45.574527 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 22:21:45.574731 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 22:21:45.575014 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 22:21:45.575211 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 22:21:45.575516 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 22:21:45.575710 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 22:21:45.575899 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 22:21:45.576101 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 22:21:45.576305 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 22:21:45.576518 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 22:21:45.576718 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 22:21:45.576912 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 22:21:45.577105 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 22:21:46.625714 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T22:21:46Z\n","I0612 22:21:46.643326 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T22:21:46Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 22:21:47.569540 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2286\n","I0612 22:21:47.573589 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2286\n","INFO:tensorflow:Running local_init_op.\n","I0612 22:21:49.181798 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 22:21:49.398991 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 22:22:00.821272 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 22:22:00.821880 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 22:22:00.825873 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.35s).\n","Accumulating evaluation results...\n","DONE (t=0.17s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.609\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.960\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.691\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.212\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.617\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.511\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.680\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.682\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.686\n","INFO:tensorflow:Finished evaluation at 2020-06-12-22:22:02\n","I0612 22:22:02.279284 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-22:22:02\n","INFO:tensorflow:Saving dict for global step 2286: DetectionBoxes_Precision/mAP = 0.60919034, DetectionBoxes_Precision/mAP (large) = 0.6166904, DetectionBoxes_Precision/mAP (medium) = 0.21176471, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.95953685, DetectionBoxes_Precision/mAP@.75IOU = 0.6910639, DetectionBoxes_Recall/AR@1 = 0.510844, DetectionBoxes_Recall/AR@10 = 0.68008816, DetectionBoxes_Recall/AR@100 = 0.68165064, DetectionBoxes_Recall/AR@100 (large) = 0.6857131, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.204482, Loss/localization_loss = 0.44900927, Loss/regularization_loss = 0.34992853, Loss/total_loss = 4.00342, global_step = 2286, learning_rate = 0.004, loss = 4.00342\n","I0612 22:22:02.279650 140539975640960 estimator.py:2049] Saving dict for global step 2286: DetectionBoxes_Precision/mAP = 0.60919034, DetectionBoxes_Precision/mAP (large) = 0.6166904, DetectionBoxes_Precision/mAP (medium) = 0.21176471, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.95953685, DetectionBoxes_Precision/mAP@.75IOU = 0.6910639, DetectionBoxes_Recall/AR@1 = 0.510844, DetectionBoxes_Recall/AR@10 = 0.68008816, DetectionBoxes_Recall/AR@100 = 0.68165064, DetectionBoxes_Recall/AR@100 (large) = 0.6857131, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.204482, Loss/localization_loss = 0.44900927, Loss/regularization_loss = 0.34992853, Loss/total_loss = 4.00342, global_step = 2286, learning_rate = 0.004, loss = 4.00342\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2286: /content/gdrive/My Drive/capstone/model/model.ckpt-2286\n","I0612 22:22:02.288336 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2286: /content/gdrive/My Drive/capstone/model/model.ckpt-2286\n","INFO:tensorflow:global_step/sec: 0.199138\n","I0612 22:23:16.310517 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.199138\n","INFO:tensorflow:loss = 2.0655081, step = 2301 (502.164 sec)\n","I0612 22:23:16.311506 140539975640960 basic_session_run_hooks.py:260] loss = 2.0655081, step = 2301 (502.164 sec)\n","INFO:tensorflow:global_step/sec: 0.209426\n","I0612 22:31:13.805199 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.209426\n","INFO:tensorflow:loss = 2.340856, step = 2401 (477.496 sec)\n","I0612 22:31:13.807175 140539975640960 basic_session_run_hooks.py:260] loss = 2.340856, step = 2401 (477.496 sec)\n","INFO:tensorflow:Saving checkpoints for 2406 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 22:31:37.892066 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2406 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1713847b8>> could not be transformed and will be executed 
as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0612 22:31:40.906565 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1713847b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17191ba60> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 22:31:41.084847 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17191ba60> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 22:31:41.558063 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:31:43.815188 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:31:43.849114 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:31:43.881617 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:31:43.914170 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:31:43.947518 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:31:43.979647 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 22:31:46.401787 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 22:31:46.402147 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 22:31:46.402424 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 22:31:46.402627 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 22:31:46.402886 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 22:31:46.403070 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 22:31:46.403324 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 22:31:46.403507 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 22:31:46.403764 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 22:31:46.403931 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 22:31:46.404181 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 22:31:46.404346 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 22:31:46.404616 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 22:31:46.404795 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 22:31:46.405052 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 22:31:46.405230 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 22:31:46.405501 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 22:31:46.405707 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 22:31:46.405957 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 22:31:46.406124 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 22:31:46.406392 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 22:31:46.406584 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 22:31:46.406840 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 22:31:46.407007 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 22:31:46.407257 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 22:31:46.407431 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 22:31:46.407701 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 22:31:46.407861 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 22:31:46.408108 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 22:31:46.408281 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 22:31:46.408544 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 22:31:46.408713 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 22:31:46.408961 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 22:31:46.409120 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 22:31:46.409367 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 22:31:46.409540 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 22:31:46.409698 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 22:31:46.409851 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 22:31:46.410001 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 22:31:46.410152 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 22:31:46.410305 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 22:31:46.410501 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 22:31:46.410679 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 22:31:48.124959 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T22:31:48Z\n","I0612 22:31:48.151157 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T22:31:48Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 22:31:49.045174 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2406\n","I0612 22:31:49.048685 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2406\n","INFO:tensorflow:Running local_init_op.\n","I0612 22:31:50.693893 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 22:31:50.905775 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 22:32:02.365932 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 22:32:02.366737 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 22:32:02.370917 140537680893696 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.34s).\n","Accumulating evaluation results...\n","DONE (t=0.11s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.610\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.957\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.705\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.615\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.496\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.665\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.667\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.671\n","INFO:tensorflow:Finished evaluation at 2020-06-12-22:32:03\n","I0612 22:32:03.745405 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-22:32:03\n","INFO:tensorflow:Saving dict for global step 2406: DetectionBoxes_Precision/mAP = 0.6101466, DetectionBoxes_Precision/mAP (large) = 0.61451787, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.95701265, DetectionBoxes_Precision/mAP@.75IOU = 0.7046015, DetectionBoxes_Recall/AR@1 = 0.49586004, DetectionBoxes_Recall/AR@10 = 0.6654514, DetectionBoxes_Recall/AR@100 = 0.6670139, DetectionBoxes_Recall/AR@100 (large) = 0.67138886, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.0846043, Loss/localization_loss = 0.49524933, Loss/regularization_loss = 0.3500626, Loss/total_loss = 3.9299161, global_step = 2406, learning_rate = 0.004, loss = 3.9299161\n","I0612 22:32:03.745719 140539975640960 estimator.py:2049] Saving dict for global step 2406: DetectionBoxes_Precision/mAP = 0.6101466, DetectionBoxes_Precision/mAP (large) = 0.61451787, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.95701265, DetectionBoxes_Precision/mAP@.75IOU = 0.7046015, DetectionBoxes_Recall/AR@1 = 0.49586004, DetectionBoxes_Recall/AR@10 = 0.6654514, DetectionBoxes_Recall/AR@100 = 0.6670139, DetectionBoxes_Recall/AR@100 (large) = 0.67138886, DetectionBoxes_Recall/AR@100 (medium) = 0.3, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.0846043, Loss/localization_loss = 0.49524933, Loss/regularization_loss = 0.3500626, Loss/total_loss = 3.9299161, global_step = 2406, learning_rate = 0.004, loss = 3.9299161\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2406: /content/gdrive/My Drive/capstone/model/model.ckpt-2406\n","I0612 22:32:03.754264 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2406: /content/gdrive/My Drive/capstone/model/model.ckpt-2406\n","INFO:tensorflow:global_step/sec: 0.198782\n","I0612 22:39:36.869557 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.198782\n","INFO:tensorflow:loss = 1.7231023, step = 2501 (503.064 sec)\n","I0612 22:39:36.871287 140539975640960 basic_session_run_hooks.py:260] loss = 1.7231023, step = 2501 (503.064 sec)\n","INFO:tensorflow:Saving checkpoints for 2527 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 22:41:42.423608 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2527 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd176164898>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 22:41:45.716000 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd176164898>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17f9ff840> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 22:41:45.896186 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd17f9ff840> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 22:41:46.360629 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:41:48.636378 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:41:48.669386 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:41:48.700728 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:41:48.732702 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:41:48.764002 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:41:48.796026 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 22:41:51.186934 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 22:41:51.187326 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 22:41:51.187645 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 22:41:51.187825 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 22:41:51.188072 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 22:41:51.188254 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 22:41:51.188516 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 22:41:51.188694 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 22:41:51.188936 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 22:41:51.189097 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 22:41:51.189336 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 22:41:51.189519 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 22:41:51.189762 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 22:41:51.189921 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 22:41:51.190152 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 22:41:51.190344 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 22:41:51.190603 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 22:41:51.190763 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 22:41:51.191001 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 22:41:51.191180 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 22:41:51.191417 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 22:41:51.191603 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 22:41:51.191846 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 22:41:51.192004 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 22:41:51.192241 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 22:41:51.192418 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 22:41:51.192675 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 22:41:51.192836 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 22:41:51.193072 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 22:41:51.193234 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 22:41:51.193502 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 22:41:51.193672 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 22:41:51.193910 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 22:41:51.194068 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 22:41:51.194305 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 22:41:51.194469 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 22:41:51.194632 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 22:41:51.194787 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 22:41:51.194936 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 22:41:51.195085 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 22:41:51.195231 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 22:41:51.195379 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 22:41:51.195546 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 22:41:52.206199 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T22:41:52Z\n","I0612 22:41:52.223809 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T22:41:52Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 22:41:53.199020 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2527\n","I0612 22:41:53.202937 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2527\n","INFO:tensorflow:Running local_init_op.\n","I0612 22:41:54.891690 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 22:41:55.123166 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 22:42:06.614701 140537689286400 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 22:42:06.615701 140537689286400 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0612 22:42:06.621587 140537689286400 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.50s).\n","Accumulating evaluation results...\n","DONE (t=0.12s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.621\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.962\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.785\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.267\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.625\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.508\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.673\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.686\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.689\n","INFO:tensorflow:Finished evaluation at 2020-06-12-22:42:07\n","I0612 22:42:07.984093 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-22:42:07\n","INFO:tensorflow:Saving dict for global step 2527: DetectionBoxes_Precision/mAP = 0.62051576, DetectionBoxes_Precision/mAP (large) = 0.62532806, DetectionBoxes_Precision/mAP (medium) = 0.26666668, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9622666, DetectionBoxes_Precision/mAP@.75IOU = 0.7854955, DetectionBoxes_Recall/AR@1 = 0.50836, DetectionBoxes_Recall/AR@10 = 0.67321044, DetectionBoxes_Recall/AR@100 = 0.68607104, DetectionBoxes_Recall/AR@100 (large) = 0.68930024, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.806088, Loss/localization_loss = 0.4817247, Loss/regularization_loss = 0.35018903, Loss/total_loss = 3.6380014, global_step = 2527, learning_rate = 0.004, loss = 3.6380014\n","I0612 22:42:07.984365 140539975640960 estimator.py:2049] Saving dict for global step 2527: DetectionBoxes_Precision/mAP = 0.62051576, DetectionBoxes_Precision/mAP (large) = 0.62532806, DetectionBoxes_Precision/mAP (medium) = 0.26666668, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9622666, DetectionBoxes_Precision/mAP@.75IOU = 0.7854955, DetectionBoxes_Recall/AR@1 = 0.50836, DetectionBoxes_Recall/AR@10 = 0.67321044, DetectionBoxes_Recall/AR@100 = 0.68607104, DetectionBoxes_Recall/AR@100 (large) = 0.68930024, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.806088, Loss/localization_loss = 0.4817247, Loss/regularization_loss = 0.35018903, Loss/total_loss = 3.6380014, global_step = 2527, learning_rate = 0.004, loss = 3.6380014\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2527: /content/gdrive/My Drive/capstone/model/model.ckpt-2527\n","I0612 22:42:07.994461 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2527: /content/gdrive/My Drive/capstone/model/model.ckpt-2527\n","INFO:tensorflow:global_step/sec: 0.197065\n","I0612 22:48:04.315641 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.197065\n","INFO:tensorflow:loss = 2.4159412, step = 2601 (507.446 sec)\n","I0612 22:48:04.317279 140539975640960 basic_session_run_hooks.py:260] loss = 2.4159412, step = 2601 (507.446 sec)\n","INFO:tensorflow:Saving checkpoints for 2648 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 22:51:45.603663 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2648 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1754a54e0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 22:51:48.614391 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1754a54e0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1712aff28> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 22:51:48.797768 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1712aff28> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 22:51:49.258861 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:51:51.553344 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:51:51.586577 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:51:51.618612 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:51:51.651330 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:51:51.686294 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 22:51:51.719483 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 22:51:54.736954 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 22:51:54.737318 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 22:51:54.737616 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 22:51:54.737807 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 22:51:54.738073 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 22:51:54.738250 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 22:51:54.738520 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 22:51:54.738705 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 22:51:54.738966 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 22:51:54.739159 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 22:51:54.739417 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 22:51:54.739602 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 22:51:54.739867 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 22:51:54.740038 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 22:51:54.740288 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 22:51:54.740468 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 22:51:54.740731 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 22:51:54.740900 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 22:51:54.741156 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 22:51:54.741322 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 22:51:54.741587 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 22:51:54.741768 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 22:51:54.742019 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 22:51:54.742183 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 22:51:54.742434 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 22:51:54.742617 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 22:51:54.742880 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 22:51:54.743048 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 22:51:54.743297 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 22:51:54.743477 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 22:51:54.743746 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 22:51:54.743928 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 22:51:54.744188 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 22:51:54.744387 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 22:51:54.744664 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 22:51:54.744837 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 22:51:54.744994 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 22:51:54.745152 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 22:51:54.745307 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 22:51:54.745475 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 22:51:54.745636 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 22:51:54.745800 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 22:51:54.745962 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 22:51:55.765242 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T22:51:55Z\n","I0612 22:51:55.783936 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T22:51:55Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 22:51:56.760320 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2648\n","I0612 22:51:56.763665 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2648\n","INFO:tensorflow:Running local_init_op.\n","I0612 22:51:58.421421 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 22:51:58.662324 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 22:52:10.361983 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 22:52:10.362744 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 22:52:10.367062 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.42s).\n","Accumulating evaluation results...\n","DONE (t=0.09s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.622\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.958\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.768\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.267\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.628\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.498\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.678\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.681\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.685\n","INFO:tensorflow:Finished evaluation at 2020-06-12-22:52:11\n","I0612 22:52:11.585493 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-22:52:11\n","INFO:tensorflow:Saving dict for global step 2648: DetectionBoxes_Precision/mAP = 0.62204576, DetectionBoxes_Precision/mAP (large) = 0.62795657, DetectionBoxes_Precision/mAP (medium) = 0.26666668, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.958397, DetectionBoxes_Precision/mAP@.75IOU = 0.767776, DetectionBoxes_Recall/AR@1 = 0.49842414, DetectionBoxes_Recall/AR@10 = 0.67777777, DetectionBoxes_Recall/AR@100 = 0.6809028, DetectionBoxes_Recall/AR@100 (large) = 0.68475693, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7504187, Loss/localization_loss = 0.5007788, Loss/regularization_loss = 0.35031888, Loss/total_loss = 3.601517, global_step = 2648, learning_rate = 0.004, loss = 3.601517\n","I0612 22:52:11.585777 140539975640960 estimator.py:2049] Saving dict for global step 2648: DetectionBoxes_Precision/mAP = 0.62204576, DetectionBoxes_Precision/mAP (large) = 0.62795657, DetectionBoxes_Precision/mAP (medium) = 0.26666668, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.958397, DetectionBoxes_Precision/mAP@.75IOU = 0.767776, DetectionBoxes_Recall/AR@1 = 0.49842414, DetectionBoxes_Recall/AR@10 = 0.67777777, DetectionBoxes_Recall/AR@100 = 0.6809028, DetectionBoxes_Recall/AR@100 (large) = 0.68475693, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7504187, Loss/localization_loss = 0.5007788, Loss/regularization_loss = 0.35031888, Loss/total_loss = 3.601517, global_step = 2648, learning_rate = 0.004, loss = 3.601517\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2648: /content/gdrive/My Drive/capstone/model/model.ckpt-2648\n","I0612 22:52:11.594200 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2648: /content/gdrive/My Drive/capstone/model/model.ckpt-2648\n","INFO:tensorflow:global_step/sec: 0.198871\n","I0612 22:56:27.153562 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.198871\n","INFO:tensorflow:loss = 1.9308404, step = 2701 (502.838 sec)\n","I0612 22:56:27.155239 140539975640960 basic_session_run_hooks.py:260] loss = 1.9308404, step = 2701 (502.838 sec)\n","INFO:tensorflow:Saving checkpoints for 2767 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 23:01:46.458555 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2767 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16e2090b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 23:01:49.575858 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16e2090b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171887950> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 23:01:49.767424 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171887950> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 23:01:50.240759 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:01:52.543032 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:01:52.575790 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:01:52.607216 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:01:52.639519 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:01:52.671333 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:01:52.702791 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 23:01:55.063203 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 23:01:55.063568 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 23:01:55.063856 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 23:01:55.064036 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 23:01:55.064291 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 23:01:55.064476 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 23:01:55.064742 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 23:01:55.064912 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 23:01:55.065161 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 23:01:55.065345 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 23:01:55.065604 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 23:01:55.065782 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 23:01:55.066034 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 23:01:55.066196 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 23:01:55.066452 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 23:01:55.066662 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 23:01:55.066920 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 23:01:55.067095 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 23:01:55.067342 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 23:01:55.067533 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 23:01:55.067789 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 23:01:55.067975 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 23:01:55.068227 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 23:01:55.068396 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 23:01:55.068662 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 23:01:55.068872 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 23:01:55.069125 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 23:01:55.069293 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 23:01:55.069561 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 23:01:55.069739 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 23:01:55.069991 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 23:01:55.070156 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 23:01:55.070408 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 23:01:55.070597 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 23:01:55.070868 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 23:01:55.071064 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 23:01:55.071225 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 23:01:55.071383 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 23:01:55.071555 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 23:01:55.071717 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 23:01:55.071879 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 23:01:55.072036 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 23:01:55.072192 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 23:01:56.111939 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T23:01:56Z\n","I0612 23:01:56.130032 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T23:01:56Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 23:01:57.193619 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2767\n","I0612 23:01:57.197888 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2767\n","INFO:tensorflow:Running local_init_op.\n","I0612 23:01:58.833024 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 23:01:59.059651 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 23:02:10.636534 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 23:02:10.637510 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0612 23:02:10.643344 140537672500992 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.44s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.634\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.961\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.776\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.267\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.639\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.508\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.686\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.686\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.689\n","INFO:tensorflow:Finished evaluation at 2020-06-12-23:02:11\n","I0612 23:02:11.921101 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-23:02:11\n","INFO:tensorflow:Saving dict for global step 2767: DetectionBoxes_Precision/mAP = 0.6343202, DetectionBoxes_Precision/mAP (large) = 0.6388227, DetectionBoxes_Precision/mAP (medium) = 0.26666668, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.96108294, DetectionBoxes_Precision/mAP@.75IOU = 0.7760874, DetectionBoxes_Recall/AR@1 = 0.5080262, DetectionBoxes_Recall/AR@10 = 0.68567044, DetectionBoxes_Recall/AR@100 = 0.68567044, DetectionBoxes_Recall/AR@100 (large) = 0.6891079, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.0241714, Loss/localization_loss = 0.4993433, Loss/regularization_loss = 0.35041788, Loss/total_loss = 3.8739321, global_step = 2767, learning_rate = 0.004, loss = 3.8739321\n","I0612 23:02:11.921388 140539975640960 estimator.py:2049] Saving dict for global step 2767: DetectionBoxes_Precision/mAP = 0.6343202, DetectionBoxes_Precision/mAP (large) = 0.6388227, DetectionBoxes_Precision/mAP (medium) = 0.26666668, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.96108294, DetectionBoxes_Precision/mAP@.75IOU = 0.7760874, DetectionBoxes_Recall/AR@1 = 0.5080262, DetectionBoxes_Recall/AR@10 = 0.68567044, DetectionBoxes_Recall/AR@100 = 0.68567044, DetectionBoxes_Recall/AR@100 (large) = 0.6891079, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.0241714, Loss/localization_loss = 0.4993433, Loss/regularization_loss = 0.35041788, Loss/total_loss = 3.8739321, global_step = 2767, learning_rate = 0.004, loss = 3.8739321\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2767: /content/gdrive/My Drive/capstone/model/model.ckpt-2767\n","I0612 23:02:11.929694 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2767: /content/gdrive/My Drive/capstone/model/model.ckpt-2767\n","INFO:tensorflow:global_step/sec: 0.196584\n","I0612 23:04:55.843013 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.196584\n","INFO:tensorflow:loss = 1.9440815, step = 2801 (508.689 sec)\n","I0612 23:04:55.844679 140539975640960 basic_session_run_hooks.py:260] loss = 1.9440815, step = 2801 (508.689 sec)\n","INFO:tensorflow:Saving checkpoints for 2886 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 23:11:48.646301 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 2886 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16f9056a0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 23:11:51.795166 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16f9056a0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16fee3730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 23:11:51.979228 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16fee3730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 23:11:52.499010 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:11:55.431966 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:11:55.464669 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:11:55.496741 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:11:55.530136 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:11:55.561963 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:11:55.594389 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 23:11:58.201116 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 23:11:58.201533 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 23:11:58.201858 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 23:11:58.202051 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 23:11:58.202321 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 23:11:58.202512 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 23:11:58.202787 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 23:11:58.202966 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 23:11:58.203233 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 23:11:58.203415 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 23:11:58.203691 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 23:11:58.203894 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 23:11:58.204156 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 23:11:58.204339 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 23:11:58.204609 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 23:11:58.204803 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 23:11:58.205069 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 23:11:58.205269 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 23:11:58.205554 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 23:11:58.205743 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 23:11:58.206011 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 23:11:58.206184 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 23:11:58.206471 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 23:11:58.206655 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 23:11:58.206925 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 23:11:58.207094 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 23:11:58.207354 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 23:11:58.207584 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 23:11:58.207854 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 23:11:58.208050 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 23:11:58.208311 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 23:11:58.208507 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 23:11:58.208782 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 23:11:58.208995 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 23:11:58.209265 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 23:11:58.209434 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 23:11:58.209612 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 23:11:58.209781 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 23:11:58.209944 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 23:11:58.210103 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 23:11:58.210260 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 23:11:58.210418 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 23:11:58.210600 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 23:11:59.232943 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T23:11:59Z\n","I0612 23:11:59.250896 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T23:11:59Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 23:12:00.071508 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2886\n","I0612 23:12:00.075168 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-2886\n","INFO:tensorflow:Running local_init_op.\n","I0612 23:12:01.738972 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 23:12:01.972685 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 23:12:14.097273 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 23:12:14.097846 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 23:12:14.101901 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.40s).\n","Accumulating evaluation results...\n","DONE (t=0.15s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.629\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.961\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.754\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.300\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.635\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.527\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.690\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.691\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.695\n","INFO:tensorflow:Finished evaluation at 2020-06-12-23:12:15\n","I0612 23:12:15.392226 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-23:12:15\n","INFO:tensorflow:Saving dict for global step 2886: DetectionBoxes_Precision/mAP = 0.62852603, DetectionBoxes_Precision/mAP (large) = 0.63463247, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9611067, DetectionBoxes_Precision/mAP@.75IOU = 0.75359166, DetectionBoxes_Recall/AR@1 = 0.52689636, DetectionBoxes_Recall/AR@10 = 0.69011754, DetectionBoxes_Recall/AR@100 = 0.6910791, DetectionBoxes_Recall/AR@100 (large) = 0.69514155, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1817517, Loss/localization_loss = 0.45824316, Loss/regularization_loss = 0.35052228, Loss/total_loss = 3.9905171, global_step = 2886, learning_rate = 0.004, loss = 3.9905171\n","I0612 23:12:15.392558 140539975640960 estimator.py:2049] Saving dict for global step 2886: DetectionBoxes_Precision/mAP = 0.62852603, DetectionBoxes_Precision/mAP (large) = 0.63463247, DetectionBoxes_Precision/mAP (medium) = 0.3, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9611067, DetectionBoxes_Precision/mAP@.75IOU = 0.75359166, DetectionBoxes_Recall/AR@1 = 0.52689636, DetectionBoxes_Recall/AR@10 = 0.69011754, DetectionBoxes_Recall/AR@100 = 0.6910791, DetectionBoxes_Recall/AR@100 (large) = 0.69514155, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.1817517, Loss/localization_loss = 0.45824316, Loss/regularization_loss = 0.35052228, Loss/total_loss = 3.9905171, global_step = 2886, learning_rate = 0.004, loss = 3.9905171\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 2886: /content/gdrive/My Drive/capstone/model/model.ckpt-2886\n","I0612 23:12:15.401224 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 2886: /content/gdrive/My Drive/capstone/model/model.ckpt-2886\n","INFO:tensorflow:global_step/sec: 0.196013\n","I0612 23:13:26.013307 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.196013\n","INFO:tensorflow:loss = 3.2296011, step = 2901 (510.170 sec)\n","I0612 23:13:26.014260 140539975640960 basic_session_run_hooks.py:260] loss = 3.2296011, step = 2901 (510.170 sec)\n","INFO:tensorflow:global_step/sec: 0.208351\n","I0612 23:21:25.973484 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.208351\n","INFO:tensorflow:loss = 2.1752012, step = 3001 (479.961 sec)\n","I0612 23:21:25.975219 140539975640960 basic_session_run_hooks.py:260] loss = 2.1752012, step = 3001 (479.961 sec)\n","INFO:tensorflow:Saving checkpoints for 3006 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 23:21:49.252983 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3006 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1711ec1d0>> could not be transformed and will be 
executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0612 23:21:52.422930 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1711ec1d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd176118378> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 23:21:52.608309 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd176118378> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 23:21:53.075641 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:21:55.404940 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:21:55.437504 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:21:55.469172 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:21:55.501647 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:21:55.534309 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:21:55.566476 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 23:21:58.220584 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 23:21:58.220966 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 23:21:58.221249 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 23:21:58.221504 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 23:21:58.221807 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 23:21:58.222011 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 23:21:58.222268 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 23:21:58.222463 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 23:21:58.222716 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 23:21:58.222912 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 23:21:58.223162 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 23:21:58.223331 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 23:21:58.223614 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 23:21:58.223820 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 23:21:58.224073 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 23:21:58.224240 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 23:21:58.224507 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 23:21:58.224773 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 23:21:58.225101 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 23:21:58.225282 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 23:21:58.225573 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 23:21:58.225761 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 23:21:58.226024 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 23:21:58.226198 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 23:21:58.226489 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 23:21:58.226713 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 23:21:58.226982 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 23:21:58.227210 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 23:21:58.227493 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 23:21:58.227683 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 23:21:58.227950 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 23:21:58.228140 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 23:21:58.228512 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 23:21:58.228710 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 23:21:58.228968 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 23:21:58.229132 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 23:21:58.229291 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 23:21:58.229460 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 23:21:58.229617 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 23:21:58.229772 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 23:21:58.229925 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 23:21:58.230074 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 23:21:58.230224 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 23:21:59.385549 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T23:21:59Z\n","I0612 23:21:59.403407 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T23:21:59Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 23:22:00.706617 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3006\n","I0612 23:22:00.710157 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3006\n","INFO:tensorflow:Running local_init_op.\n","I0612 23:22:02.388513 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 23:22:02.629318 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 23:22:14.388516 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 23:22:14.388959 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0612 23:22:14.394401 140537680893696 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.35s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.645\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.967\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.669\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.648\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.529\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.704\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.704\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.706\n","INFO:tensorflow:Finished evaluation at 2020-06-12-23:22:15\n","I0612 23:22:15.681309 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-23:22:15\n","INFO:tensorflow:Saving dict for global step 3006: DetectionBoxes_Precision/mAP = 0.6450881, DetectionBoxes_Precision/mAP (large) = 0.6477831, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9669951, DetectionBoxes_Precision/mAP@.75IOU = 0.6693516, DetectionBoxes_Recall/AR@1 = 0.5294605, DetectionBoxes_Recall/AR@10 = 0.70397973, DetectionBoxes_Recall/AR@100 = 0.70397973, DetectionBoxes_Recall/AR@100 (large) = 0.70627135, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.8818772, Loss/localization_loss = 0.45059407, Loss/regularization_loss = 0.35065848, Loss/total_loss = 3.6831295, global_step = 3006, learning_rate = 0.004, loss = 3.6831295\n","I0612 23:22:15.681689 140539975640960 estimator.py:2049] Saving dict for global step 3006: DetectionBoxes_Precision/mAP = 0.6450881, DetectionBoxes_Precision/mAP (large) = 0.6477831, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9669951, DetectionBoxes_Precision/mAP@.75IOU = 0.6693516, DetectionBoxes_Recall/AR@1 = 0.5294605, DetectionBoxes_Recall/AR@10 = 0.70397973, DetectionBoxes_Recall/AR@100 = 0.70397973, DetectionBoxes_Recall/AR@100 (large) = 0.70627135, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.8818772, Loss/localization_loss = 0.45059407, Loss/regularization_loss = 0.35065848, Loss/total_loss = 3.6831295, global_step = 3006, learning_rate = 0.004, loss = 3.6831295\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3006: /content/gdrive/My Drive/capstone/model/model.ckpt-3006\n","I0612 23:22:15.691026 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3006: /content/gdrive/My Drive/capstone/model/model.ckpt-3006\n","INFO:tensorflow:global_step/sec: 0.199316\n","I0612 23:29:47.689211 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.199316\n","INFO:tensorflow:loss = 2.1066918, step = 3101 (501.716 sec)\n","I0612 23:29:47.690759 140539975640960 basic_session_run_hooks.py:260] loss = 2.1066918, step = 3101 (501.716 sec)\n","INFO:tensorflow:Saving checkpoints for 3126 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 23:31:51.447113 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3126 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd17906fcf8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 23:31:54.577150 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd17906fcf8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170f448c8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 23:31:54.764317 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170f448c8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 23:31:55.275403 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:31:57.604650 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:31:57.639478 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:31:57.674239 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:31:57.709068 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:31:57.742627 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:31:57.776690 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 23:32:00.159283 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 23:32:00.159704 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 23:32:00.160049 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 23:32:00.160274 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 23:32:00.160594 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 23:32:00.160826 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 23:32:00.161128 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 23:32:00.161341 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 23:32:00.161647 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 23:32:00.161883 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 23:32:00.162181 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 23:32:00.162404 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 23:32:00.162736 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 23:32:00.162960 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 23:32:00.163254 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 23:32:00.163484 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 23:32:00.163790 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 23:32:00.164005 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 23:32:00.164301 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 23:32:00.164531 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 23:32:00.164839 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 23:32:00.165068 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 23:32:00.165364 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 23:32:00.165586 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 23:32:00.165888 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 23:32:00.166098 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 23:32:00.166391 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 23:32:00.166635 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 23:32:00.166941 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 23:32:00.167153 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 23:32:00.167464 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 23:32:00.167677 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 23:32:00.167978 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 23:32:00.168191 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 23:32:00.168505 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 23:32:00.168715 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 23:32:00.168940 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 23:32:00.169154 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 23:32:00.169365 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 23:32:00.169587 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 23:32:00.169808 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 23:32:00.170019 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 23:32:00.170225 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 23:32:01.218253 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T23:32:01Z\n","I0612 23:32:01.236350 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T23:32:01Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 23:32:02.132998 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3126\n","I0612 23:32:02.137129 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3126\n","INFO:tensorflow:Running local_init_op.\n","I0612 23:32:03.936342 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 23:32:04.156892 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 23:32:15.518679 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 23:32:15.519250 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 23:32:15.524345 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.41s).\n","Accumulating evaluation results...\n","DONE (t=0.19s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.636\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.970\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.684\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.641\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.517\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.685\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.688\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.691\n","INFO:tensorflow:Finished evaluation at 2020-06-12-23:32:16\n","I0612 23:32:16.831974 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-23:32:16\n","INFO:tensorflow:Saving dict for global step 3126: DetectionBoxes_Precision/mAP = 0.635802, DetectionBoxes_Precision/mAP (large) = 0.6405555, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.96967804, DetectionBoxes_Precision/mAP@.75IOU = 0.68440866, DetectionBoxes_Recall/AR@1 = 0.5173878, DetectionBoxes_Recall/AR@10 = 0.6848958, DetectionBoxes_Recall/AR@100 = 0.6880208, DetectionBoxes_Recall/AR@100 (large) = 0.6905208, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.626016, Loss/localization_loss = 0.5179458, Loss/regularization_loss = 0.35079935, Loss/total_loss = 3.4947612, global_step = 3126, learning_rate = 0.004, loss = 3.4947612\n","I0612 23:32:16.832300 140539975640960 estimator.py:2049] Saving dict for global step 3126: DetectionBoxes_Precision/mAP = 0.635802, DetectionBoxes_Precision/mAP (large) = 0.6405555, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.96967804, DetectionBoxes_Precision/mAP@.75IOU = 0.68440866, DetectionBoxes_Recall/AR@1 = 0.5173878, DetectionBoxes_Recall/AR@10 = 0.6848958, DetectionBoxes_Recall/AR@100 = 0.6880208, DetectionBoxes_Recall/AR@100 (large) = 0.6905208, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.626016, Loss/localization_loss = 0.5179458, Loss/regularization_loss = 0.35079935, Loss/total_loss = 3.4947612, global_step = 3126, learning_rate = 0.004, loss = 3.4947612\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3126: /content/gdrive/My Drive/capstone/model/model.ckpt-3126\n","I0612 23:32:16.840519 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3126: /content/gdrive/My Drive/capstone/model/model.ckpt-3126\n","INFO:tensorflow:global_step/sec: 0.196861\n","I0612 23:38:15.661987 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.196861\n","INFO:tensorflow:loss = 1.7272168, step = 3201 (507.974 sec)\n","I0612 23:38:15.664427 140539975640960 basic_session_run_hooks.py:260] loss = 1.7272168, step = 3201 (507.974 sec)\n","INFO:tensorflow:Saving checkpoints for 3246 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 23:41:53.493315 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3246 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16fd91358>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 23:41:56.659128 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16fd91358>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1709c5ea0> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 23:41:56.845784 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1709c5ea0> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 23:41:57.360886 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:41:59.676896 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:41:59.712476 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:41:59.746687 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:41:59.781915 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:41:59.816894 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:41:59.851899 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 23:42:02.924792 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 23:42:02.925156 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 23:42:02.925434 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 23:42:02.925632 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 23:42:02.925900 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 23:42:02.926077 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 23:42:02.926341 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 23:42:02.926547 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 23:42:02.926816 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 23:42:02.927001 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 23:42:02.927259 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 23:42:02.927426 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 23:42:02.927695 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 23:42:02.927867 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 23:42:02.928120 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 23:42:02.928299 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 23:42:02.928616 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 23:42:02.928819 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 23:42:02.929076 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 23:42:02.929254 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 23:42:02.929517 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 23:42:02.929699 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 23:42:02.929958 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 23:42:02.930135 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 23:42:02.930385 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 23:42:02.930565 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 23:42:02.930823 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 23:42:02.930989 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 23:42:02.931241 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 23:42:02.931401 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 23:42:02.931662 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 23:42:02.931839 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 23:42:02.932089 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 23:42:02.932264 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 23:42:02.932530 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 23:42:02.932711 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 23:42:02.932871 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 23:42:02.933027 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 23:42:02.933182 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 23:42:02.933332 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 23:42:02.933495 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 23:42:02.933649 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 23:42:02.933808 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 23:42:04.066096 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T23:42:04Z\n","I0612 23:42:04.087709 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T23:42:04Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 23:42:04.954252 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3246\n","I0612 23:42:04.959257 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3246\n","INFO:tensorflow:Running local_init_op.\n","I0612 23:42:06.587864 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 23:42:06.795943 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 23:42:18.360803 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 23:42:18.361552 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 23:42:18.364112 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.40s).\n","Accumulating evaluation results...\n","DONE (t=0.16s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.592\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.956\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.629\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.594\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.502\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.657\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.662\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.663\n","INFO:tensorflow:Finished evaluation at 2020-06-12-23:42:19\n","I0612 23:42:19.723961 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-23:42:19\n","INFO:tensorflow:Saving dict for global step 3246: DetectionBoxes_Precision/mAP = 0.59194773, DetectionBoxes_Precision/mAP (large) = 0.59427947, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9556889, DetectionBoxes_Precision/mAP@.75IOU = 0.62943655, DetectionBoxes_Recall/AR@1 = 0.501656, DetectionBoxes_Recall/AR@10 = 0.65682423, DetectionBoxes_Recall/AR@100 = 0.6618723, DetectionBoxes_Recall/AR@100 (large) = 0.6634348, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7412271, Loss/localization_loss = 0.51106834, Loss/regularization_loss = 0.35092348, Loss/total_loss = 3.6032186, global_step = 3246, learning_rate = 0.004, loss = 3.6032186\n","I0612 23:42:19.724248 140539975640960 estimator.py:2049] Saving dict for global step 3246: DetectionBoxes_Precision/mAP = 0.59194773, DetectionBoxes_Precision/mAP (large) = 0.59427947, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9556889, DetectionBoxes_Precision/mAP@.75IOU = 0.62943655, DetectionBoxes_Recall/AR@1 = 0.501656, DetectionBoxes_Recall/AR@10 = 0.65682423, DetectionBoxes_Recall/AR@100 = 0.6618723, DetectionBoxes_Recall/AR@100 (large) = 0.6634348, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7412271, Loss/localization_loss = 0.51106834, Loss/regularization_loss = 0.35092348, Loss/total_loss = 3.6032186, global_step = 3246, learning_rate = 0.004, loss = 3.6032186\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3246: /content/gdrive/My Drive/capstone/model/model.ckpt-3246\n","I0612 23:42:19.736809 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3246: /content/gdrive/My Drive/capstone/model/model.ckpt-3246\n","INFO:tensorflow:global_step/sec: 0.198134\n","I0612 23:46:40.371566 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.198134\n","INFO:tensorflow:loss = 1.7644088, step = 3301 (504.709 sec)\n","I0612 23:46:40.373161 140539975640960 basic_session_run_hooks.py:260] loss = 1.7644088, step = 3301 (504.709 sec)\n","INFO:tensorflow:Saving checkpoints for 3367 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0612 23:51:58.085949 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3367 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171e7d0b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0612 23:52:01.130259 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171e7d0b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171229620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0612 23:52:01.306584 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171229620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0612 23:52:01.815431 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:52:04.118982 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:52:04.153420 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:52:04.184930 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:52:04.217752 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:52:04.250301 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0612 23:52:04.282281 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0612 23:52:06.639500 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0612 23:52:06.639844 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0612 23:52:06.640118 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0612 23:52:06.640292 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0612 23:52:06.640558 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0612 23:52:06.640747 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0612 23:52:06.641005 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0612 23:52:06.641194 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0612 23:52:06.641435 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0612 23:52:06.641613 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0612 23:52:06.641881 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0612 23:52:06.642077 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0612 23:52:06.642328 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0612 23:52:06.642523 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0612 23:52:06.642779 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0612 23:52:06.642955 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0612 23:52:06.643213 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0612 23:52:06.643383 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0612 23:52:06.643649 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0612 23:52:06.643824 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0612 23:52:06.644079 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0612 23:52:06.644254 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0612 23:52:06.644520 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0612 23:52:06.644701 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0612 23:52:06.644964 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0612 23:52:06.645144 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0612 23:52:06.645411 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0612 23:52:06.645615 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0612 23:52:06.645876 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0612 23:52:06.646046 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0612 23:52:06.646294 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0612 23:52:06.646486 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0612 23:52:06.646758 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0612 23:52:06.646928 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0612 23:52:06.647188 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0612 23:52:06.647357 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0612 23:52:06.647540 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0612 23:52:06.647703 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0612 23:52:06.647868 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0612 23:52:06.648029 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0612 23:52:06.648181 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0612 23:52:06.648333 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0612 23:52:06.648497 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0612 23:52:07.700464 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-12T23:52:07Z\n","I0612 23:52:07.719682 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-12T23:52:07Z\n","INFO:tensorflow:Graph was finalized.\n","I0612 23:52:08.704185 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3367\n","I0612 23:52:08.707013 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3367\n","INFO:tensorflow:Running local_init_op.\n","I0612 23:52:10.327956 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0612 23:52:10.551196 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0612 23:52:21.981494 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0612 23:52:21.982114 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0612 23:52:21.987366 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.42s).\n","Accumulating evaluation results...\n","DONE (t=0.09s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.629\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.968\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.696\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.333\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.634\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.516\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.683\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.683\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.687\n","INFO:tensorflow:Finished evaluation at 2020-06-12-23:52:23\n","I0612 23:52:23.388253 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-12-23:52:23\n","INFO:tensorflow:Saving dict for global step 3367: DetectionBoxes_Precision/mAP = 0.62942725, DetectionBoxes_Precision/mAP (large) = 0.6342819, DetectionBoxes_Precision/mAP (medium) = 0.33333334, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.96752787, DetectionBoxes_Precision/mAP@.75IOU = 0.696016, DetectionBoxes_Recall/AR@1 = 0.5157586, DetectionBoxes_Recall/AR@10 = 0.68282586, DetectionBoxes_Recall/AR@100 = 0.68282586, DetectionBoxes_Recall/AR@100 (large) = 0.68688834, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.896298, Loss/localization_loss = 0.46757662, Loss/regularization_loss = 0.3510266, Loss/total_loss = 3.7149005, global_step = 3367, learning_rate = 0.004, loss = 3.7149005\n","I0612 23:52:23.388587 140539975640960 estimator.py:2049] Saving dict for global step 3367: DetectionBoxes_Precision/mAP = 0.62942725, DetectionBoxes_Precision/mAP (large) = 0.6342819, DetectionBoxes_Precision/mAP (medium) = 0.33333334, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.96752787, DetectionBoxes_Precision/mAP@.75IOU = 0.696016, DetectionBoxes_Recall/AR@1 = 0.5157586, DetectionBoxes_Recall/AR@10 = 0.68282586, DetectionBoxes_Recall/AR@100 = 0.68282586, DetectionBoxes_Recall/AR@100 (large) = 0.68688834, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.896298, Loss/localization_loss = 0.46757662, Loss/regularization_loss = 0.3510266, Loss/total_loss = 3.7149005, global_step = 3367, learning_rate = 0.004, loss = 3.7149005\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3367: /content/gdrive/My Drive/capstone/model/model.ckpt-3367\n","I0612 23:52:23.397849 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3367: /content/gdrive/My Drive/capstone/model/model.ckpt-3367\n","INFO:tensorflow:global_step/sec: 0.196691\n","I0612 23:55:08.782720 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.196691\n","INFO:tensorflow:loss = 2.0340924, step = 3401 (508.411 sec)\n","I0612 23:55:08.784381 140539975640960 basic_session_run_hooks.py:260] loss = 2.0340924, step = 3401 (508.411 sec)\n","INFO:tensorflow:Saving checkpoints for 3486 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 00:02:00.704865 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3486 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1753df5f8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 00:02:03.804148 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1753df5f8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171441b70> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 00:02:03.980138 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171441b70> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 00:02:04.445081 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:02:06.753495 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:02:06.786529 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:02:06.822664 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:02:06.855261 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:02:06.887140 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:02:06.918898 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 00:02:09.857076 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 00:02:09.857428 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 00:02:09.857765 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 00:02:09.857982 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 00:02:09.858319 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 00:02:09.858527 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 00:02:09.858827 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 00:02:09.859035 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 00:02:09.859314 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 00:02:09.859526 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 00:02:09.859827 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 00:02:09.860027 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 00:02:09.860299 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 00:02:09.860506 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 00:02:09.860793 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 00:02:09.860992 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 00:02:09.861268 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 00:02:09.861476 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 00:02:09.861750 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 00:02:09.861952 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 00:02:09.862226 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 00:02:09.862419 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 00:02:09.862711 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 00:02:09.862906 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 00:02:09.863198 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 00:02:09.863401 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 00:02:09.863713 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 00:02:09.863921 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 00:02:09.864198 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 00:02:09.864395 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 00:02:09.864710 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 00:02:09.864917 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 00:02:09.865193 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 00:02:09.865389 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 00:02:09.865682 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 00:02:09.865876 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 00:02:09.866063 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 00:02:09.866251 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 00:02:09.866455 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 00:02:09.866663 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 00:02:09.866860 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 00:02:09.867050 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 00:02:09.867243 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 00:02:10.919583 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T00:02:10Z\n","I0613 00:02:10.938623 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T00:02:10Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 00:02:11.911738 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3486\n","I0613 00:02:11.915370 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3486\n","INFO:tensorflow:Running local_init_op.\n","I0613 00:02:13.544914 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 00:02:13.757678 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 00:02:25.349316 140537689286400 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 00:02:25.350125 140537689286400 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 00:02:25.355322 140537689286400 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.48s).\n","Accumulating evaluation results...\n","DONE (t=0.18s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.632\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.966\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.738\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.450\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.636\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.518\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.678\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.683\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.683\n","INFO:tensorflow:Finished evaluation at 2020-06-13-00:02:26\n","I0613 00:02:26.646572 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-00:02:26\n","INFO:tensorflow:Saving dict for global step 3486: DetectionBoxes_Precision/mAP = 0.6323873, DetectionBoxes_Precision/mAP (large) = 0.6357523, DetectionBoxes_Precision/mAP (medium) = 0.45, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.96565765, DetectionBoxes_Precision/mAP@.75IOU = 0.7378785, DetectionBoxes_Recall/AR@1 = 0.51753473, DetectionBoxes_Recall/AR@10 = 0.67823184, DetectionBoxes_Recall/AR@100 = 0.68267894, DetectionBoxes_Recall/AR@100 (large) = 0.68340814, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7506592, Loss/localization_loss = 0.48289773, Loss/regularization_loss = 0.35113215, Loss/total_loss = 3.5846896, global_step = 3486, learning_rate = 0.004, loss = 3.5846896\n","I0613 00:02:26.646862 140539975640960 estimator.py:2049] Saving dict for global step 3486: DetectionBoxes_Precision/mAP = 0.6323873, DetectionBoxes_Precision/mAP (large) = 0.6357523, DetectionBoxes_Precision/mAP (medium) = 0.45, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.96565765, DetectionBoxes_Precision/mAP@.75IOU = 0.7378785, DetectionBoxes_Recall/AR@1 = 0.51753473, DetectionBoxes_Recall/AR@10 = 0.67823184, DetectionBoxes_Recall/AR@100 = 0.68267894, DetectionBoxes_Recall/AR@100 (large) = 0.68340814, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7506592, Loss/localization_loss = 0.48289773, Loss/regularization_loss = 0.35113215, Loss/total_loss = 3.5846896, global_step = 3486, learning_rate = 0.004, loss = 3.5846896\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3486: /content/gdrive/My Drive/capstone/model/model.ckpt-3486\n","I0613 00:02:26.655721 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3486: /content/gdrive/My Drive/capstone/model/model.ckpt-3486\n","INFO:tensorflow:global_step/sec: 0.195416\n","I0613 00:03:40.511472 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.195416\n","INFO:tensorflow:loss = 1.5650862, step = 3501 (511.728 sec)\n","I0613 00:03:40.512582 140539975640960 basic_session_run_hooks.py:260] loss = 1.5650862, step = 3501 (511.728 sec)\n","INFO:tensorflow:global_step/sec: 0.207271\n","I0613 00:11:42.972466 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.207271\n","INFO:tensorflow:loss = 1.7950413, step = 3601 (482.462 sec)\n","I0613 00:11:42.974132 140539975640960 basic_session_run_hooks.py:260] loss = 1.7950413, step = 3601 (482.462 sec)\n","INFO:tensorflow:Saving checkpoints for 3605 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 00:12:02.379992 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3605 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16f3a00b8>> could not be transformed and will be 
executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0613 00:12:05.468717 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16f3a00b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16fae8730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 00:12:05.643147 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16fae8730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 00:12:06.114305 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:12:08.398580 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:12:08.431547 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:12:08.463808 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:12:08.496193 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:12:08.528189 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:12:08.559620 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 00:12:10.911938 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 00:12:10.912322 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 00:12:10.912656 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 00:12:10.912878 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 00:12:10.913171 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 00:12:10.913378 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 00:12:10.913696 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 00:12:10.913911 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 00:12:10.914224 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 00:12:10.914427 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 00:12:10.914729 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 00:12:10.914937 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 00:12:10.915222 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 00:12:10.915452 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 00:12:10.915745 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 00:12:10.915954 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 00:12:10.916238 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 00:12:10.916455 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 00:12:10.916743 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 00:12:10.916949 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 00:12:10.917255 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 00:12:10.917496 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 00:12:10.917823 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 00:12:10.918032 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 00:12:10.918315 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 00:12:10.918529 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 00:12:10.918827 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 00:12:10.919036 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 00:12:10.919323 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 00:12:10.919536 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 00:12:10.919831 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 00:12:10.920033 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 00:12:10.920317 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 00:12:10.920527 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 00:12:10.920825 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 00:12:10.921020 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 00:12:10.921210 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 00:12:10.921403 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 00:12:10.921618 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 00:12:10.921822 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 00:12:10.922017 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 00:12:10.922212 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 00:12:10.922405 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 00:12:11.936761 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T00:12:11Z\n","I0613 00:12:11.955354 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T00:12:11Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 00:12:12.925480 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3605\n","I0613 00:12:12.929552 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3605\n","INFO:tensorflow:Running local_init_op.\n","I0613 00:12:14.601958 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 00:12:14.823243 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 00:12:26.405627 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 00:12:26.406365 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 00:12:26.411289 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.60s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.637\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.956\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.783\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.642\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.514\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.694\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.696\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.696\n","INFO:tensorflow:Finished evaluation at 2020-06-13-00:12:27\n","I0613 00:12:27.707704 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-00:12:27\n","INFO:tensorflow:Saving dict for global step 3605: DetectionBoxes_Precision/mAP = 0.6374975, DetectionBoxes_Precision/mAP (large) = 0.641876, DetectionBoxes_Precision/mAP (medium) = 0.4, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9562012, DetectionBoxes_Precision/mAP@.75IOU = 0.78332376, DetectionBoxes_Recall/AR@1 = 0.5140492, DetectionBoxes_Recall/AR@10 = 0.69397706, DetectionBoxes_Recall/AR@100 = 0.69553953, DetectionBoxes_Recall/AR@100 (large) = 0.69616455, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.752057, Loss/localization_loss = 0.46073472, Loss/regularization_loss = 0.35126516, Loss/total_loss = 3.5640574, global_step = 3605, learning_rate = 0.004, loss = 3.5640574\n","I0613 00:12:27.708009 140539975640960 estimator.py:2049] Saving dict for global step 3605: DetectionBoxes_Precision/mAP = 0.6374975, DetectionBoxes_Precision/mAP (large) = 0.641876, DetectionBoxes_Precision/mAP (medium) = 0.4, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9562012, DetectionBoxes_Precision/mAP@.75IOU = 0.78332376, DetectionBoxes_Recall/AR@1 = 0.5140492, DetectionBoxes_Recall/AR@10 = 0.69397706, DetectionBoxes_Recall/AR@100 = 0.69553953, DetectionBoxes_Recall/AR@100 (large) = 0.69616455, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.752057, Loss/localization_loss = 0.46073472, Loss/regularization_loss = 0.35126516, Loss/total_loss = 3.5640574, global_step = 3605, learning_rate = 0.004, loss = 3.5640574\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3605: /content/gdrive/My Drive/capstone/model/model.ckpt-3605\n","I0613 00:12:27.716220 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3605: /content/gdrive/My Drive/capstone/model/model.ckpt-3605\n","INFO:tensorflow:global_step/sec: 0.196076\n","I0613 00:20:12.978129 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.196076\n","INFO:tensorflow:loss = 1.9116399, step = 3701 (510.006 sec)\n","I0613 00:20:12.979771 140539975640960 basic_session_run_hooks.py:260] loss = 1.9116399, step = 3701 (510.006 sec)\n","INFO:tensorflow:Saving checkpoints for 3724 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 00:22:02.536663 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3724 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16fc5b710>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 00:22:05.614497 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16fc5b710>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170b09ea0> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 00:22:05.787208 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170b09ea0> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 00:22:06.240044 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:22:09.155540 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:22:09.189910 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:22:09.221652 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:22:09.254087 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:22:09.285851 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:22:09.318022 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 00:22:11.666912 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 00:22:11.667259 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 00:22:11.667558 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 00:22:11.667739 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 00:22:11.668027 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 00:22:11.668220 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 00:22:11.668488 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 00:22:11.668665 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 00:22:11.668923 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 00:22:11.669100 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 00:22:11.669348 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 00:22:11.669527 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 00:22:11.669786 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 00:22:11.669951 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 00:22:11.670199 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 00:22:11.670360 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 00:22:11.670616 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 00:22:11.670789 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 00:22:11.671039 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 00:22:11.671211 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 00:22:11.671470 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 00:22:11.671638 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 00:22:11.671892 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 00:22:11.672050 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 00:22:11.672283 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 00:22:11.672449 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 00:22:11.672687 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 00:22:11.672849 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 00:22:11.673104 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 00:22:11.673262 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 00:22:11.673517 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 00:22:11.673681 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 00:22:11.673928 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 00:22:11.674088 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 00:22:11.674329 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 00:22:11.674494 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 00:22:11.674648 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 00:22:11.674807 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 00:22:11.674967 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 00:22:11.675126 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 00:22:11.675275 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 00:22:11.675422 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 00:22:11.675582 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 00:22:12.790276 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T00:22:12Z\n","I0613 00:22:12.816542 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T00:22:12Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 00:22:13.821629 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3724\n","I0613 00:22:13.825418 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3724\n","INFO:tensorflow:Running local_init_op.\n","I0613 00:22:15.381047 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 00:22:15.600250 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 00:22:26.896909 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 00:22:26.897429 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 00:22:26.900514 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.49s).\n","Accumulating evaluation results...\n","DONE (t=0.15s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.654\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.967\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.819\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.533\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.658\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.539\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.710\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.712\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.700\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.712\n","INFO:tensorflow:Finished evaluation at 2020-06-13-00:22:28\n","I0613 00:22:28.217764 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-00:22:28\n","INFO:tensorflow:Saving dict for global step 3724: DetectionBoxes_Precision/mAP = 0.654376, DetectionBoxes_Precision/mAP (large) = 0.6576574, DetectionBoxes_Precision/mAP (medium) = 0.53333336, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9665766, DetectionBoxes_Precision/mAP@.75IOU = 0.8188151, DetectionBoxes_Recall/AR@1 = 0.538515, DetectionBoxes_Recall/AR@10 = 0.7104033, DetectionBoxes_Recall/AR@100 = 0.7119658, DetectionBoxes_Recall/AR@100 (large) = 0.71186167, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.5664017, Loss/localization_loss = 0.416702, Loss/regularization_loss = 0.35136026, Loss/total_loss = 3.3344646, global_step = 3724, learning_rate = 0.004, loss = 3.3344646\n","I0613 00:22:28.218065 140539975640960 estimator.py:2049] Saving dict for global step 3724: DetectionBoxes_Precision/mAP = 0.654376, DetectionBoxes_Precision/mAP (large) = 0.6576574, DetectionBoxes_Precision/mAP (medium) = 0.53333336, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9665766, DetectionBoxes_Precision/mAP@.75IOU = 0.8188151, DetectionBoxes_Recall/AR@1 = 0.538515, DetectionBoxes_Recall/AR@10 = 0.7104033, DetectionBoxes_Recall/AR@100 = 0.7119658, DetectionBoxes_Recall/AR@100 (large) = 0.71186167, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.5664017, Loss/localization_loss = 0.416702, Loss/regularization_loss = 0.35136026, Loss/total_loss = 3.3344646, global_step = 3724, learning_rate = 0.004, loss = 3.3344646\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3724: /content/gdrive/My Drive/capstone/model/model.ckpt-3724\n","I0613 00:22:28.226551 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3724: /content/gdrive/My Drive/capstone/model/model.ckpt-3724\n","INFO:tensorflow:global_step/sec: 0.198957\n","I0613 00:28:35.598796 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.198957\n","INFO:tensorflow:loss = 1.4047899, step = 3801 (502.621 sec)\n","I0613 00:28:35.600573 140539975640960 basic_session_run_hooks.py:260] loss = 1.4047899, step = 3801 (502.621 sec)\n","INFO:tensorflow:Saving checkpoints for 3845 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 00:32:05.021198 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3845 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd170a63a20>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 00:32:08.181895 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd170a63a20>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171e41268> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 00:32:08.358489 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171e41268> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 00:32:08.824082 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:32:11.132886 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:32:11.166566 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:32:11.198915 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:32:11.231070 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:32:11.262733 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:32:11.294912 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 00:32:13.752072 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 00:32:13.752386 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 00:32:13.752669 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 00:32:13.752865 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 00:32:13.753109 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 00:32:13.753282 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 00:32:13.753528 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 00:32:13.753697 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 00:32:13.753937 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 00:32:13.754109 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 00:32:13.754344 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 00:32:13.754516 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 00:32:13.754754 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 00:32:13.754916 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 00:32:13.755150 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 00:32:13.755305 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 00:32:13.755549 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 00:32:13.755710 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 00:32:13.755963 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 00:32:13.756117 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 00:32:13.756365 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 00:32:13.756554 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 00:32:13.756792 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 00:32:13.756961 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 00:32:13.757196 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 00:32:13.757385 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 00:32:13.757629 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 00:32:13.757812 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 00:32:13.758090 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 00:32:13.758290 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 00:32:13.758701 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 00:32:13.759003 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 00:32:13.759336 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 00:32:13.759534 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 00:32:13.759785 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 00:32:13.759948 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 00:32:13.760097 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 00:32:13.760250 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 00:32:13.760397 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 00:32:13.760559 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 00:32:13.760705 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 00:32:13.760855 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 00:32:13.761003 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 00:32:15.306485 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T00:32:15Z\n","I0613 00:32:15.329199 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T00:32:15Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 00:32:16.346981 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3845\n","I0613 00:32:16.350666 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3845\n","INFO:tensorflow:Running local_init_op.\n","I0613 00:32:18.052601 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 00:32:18.279328 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 00:32:29.671493 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 00:32:29.672252 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 00:32:29.676616 140537680893696 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.36s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.648\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.961\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.787\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.650\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.537\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.697\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.700\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.701\n","INFO:tensorflow:Finished evaluation at 2020-06-13-00:32:30\n","I0613 00:32:30.880038 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-00:32:30\n","INFO:tensorflow:Saving dict for global step 3845: DetectionBoxes_Precision/mAP = 0.64758396, DetectionBoxes_Precision/mAP (large) = 0.64956874, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9609015, DetectionBoxes_Precision/mAP@.75IOU = 0.78654504, DetectionBoxes_Recall/AR@1 = 0.53671205, DetectionBoxes_Recall/AR@10 = 0.6971688, DetectionBoxes_Recall/AR@100 = 0.7002938, DetectionBoxes_Recall/AR@100 (large) = 0.7014396, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.462971, Loss/localization_loss = 0.39052883, Loss/regularization_loss = 0.3514799, Loss/total_loss = 3.204979, global_step = 3845, learning_rate = 0.004, loss = 3.204979\n","I0613 00:32:30.880353 140539975640960 estimator.py:2049] Saving dict for global step 3845: DetectionBoxes_Precision/mAP = 0.64758396, DetectionBoxes_Precision/mAP (large) = 0.64956874, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9609015, DetectionBoxes_Precision/mAP@.75IOU = 0.78654504, DetectionBoxes_Recall/AR@1 = 0.53671205, DetectionBoxes_Recall/AR@10 = 0.6971688, DetectionBoxes_Recall/AR@100 = 0.7002938, DetectionBoxes_Recall/AR@100 (large) = 0.7014396, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.462971, Loss/localization_loss = 0.39052883, Loss/regularization_loss = 0.3514799, Loss/total_loss = 3.204979, global_step = 3845, learning_rate = 0.004, loss = 3.204979\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3845: /content/gdrive/My Drive/capstone/model/model.ckpt-3845\n","I0613 00:32:30.888212 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3845: /content/gdrive/My Drive/capstone/model/model.ckpt-3845\n","INFO:tensorflow:global_step/sec: 0.200024\n","I0613 00:36:55.540061 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.200024\n","INFO:tensorflow:loss = 1.7639943, step = 3901 (499.941 sec)\n","I0613 00:36:55.541870 140539975640960 basic_session_run_hooks.py:260] loss = 1.7639943, step = 3901 (499.941 sec)\n","INFO:tensorflow:Saving checkpoints for 3966 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 00:42:05.864267 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 3966 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1727eb518>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 00:42:09.137531 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1727eb518>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16e3427b8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 00:42:09.320075 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16e3427b8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 00:42:09.789585 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:42:12.110316 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:42:12.144719 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:42:12.179275 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:42:12.212910 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:42:12.245997 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:42:12.278473 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 00:42:14.624956 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 00:42:14.625316 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 00:42:14.625606 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 00:42:14.625784 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 00:42:14.626045 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 00:42:14.626224 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 00:42:14.626489 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 00:42:14.626676 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 00:42:14.626937 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 00:42:14.627106 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 00:42:14.627353 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 00:42:14.627540 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 00:42:14.627795 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 00:42:14.627965 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 00:42:14.628228 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 00:42:14.628395 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 00:42:14.628654 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 00:42:14.628827 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 00:42:14.629075 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 00:42:14.629240 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 00:42:14.629499 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 00:42:14.629667 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 00:42:14.629922 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 00:42:14.630087 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 00:42:14.630336 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 00:42:14.630513 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 00:42:14.630761 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 00:42:14.630931 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 00:42:14.631179 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 00:42:14.631341 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 00:42:14.631600 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 00:42:14.631766 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 00:42:14.632016 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 00:42:14.632187 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 00:42:14.632428 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 00:42:14.632599 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 00:42:14.632754 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 00:42:14.632912 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 00:42:14.633059 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 00:42:14.633210 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 00:42:14.633359 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 00:42:14.633522 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 00:42:14.633674 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 00:42:15.674696 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T00:42:15Z\n","I0613 00:42:15.692551 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T00:42:15Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 00:42:16.686155 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3966\n","I0613 00:42:16.690402 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-3966\n","INFO:tensorflow:Running local_init_op.\n","I0613 00:42:18.335900 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 00:42:18.563640 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 00:42:29.875798 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 00:42:29.876453 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 00:42:29.879376 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.41s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.602\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.932\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.684\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.604\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.527\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.680\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.684\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.700\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.683\n","INFO:tensorflow:Finished evaluation at 2020-06-13-00:42:31\n","I0613 00:42:31.224579 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-00:42:31\n","INFO:tensorflow:Saving dict for global step 3966: DetectionBoxes_Precision/mAP = 0.6015867, DetectionBoxes_Precision/mAP (large) = 0.6043433, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9324607, DetectionBoxes_Precision/mAP@.75IOU = 0.6843519, DetectionBoxes_Recall/AR@1 = 0.52678955, DetectionBoxes_Recall/AR@10 = 0.68040866, DetectionBoxes_Recall/AR@100 = 0.68353367, DetectionBoxes_Recall/AR@100 (large) = 0.683117, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.9768953, Loss/localization_loss = 0.41501862, Loss/regularization_loss = 0.3515936, Loss/total_loss = 3.7435076, global_step = 3966, learning_rate = 0.004, loss = 3.7435076\n","I0613 00:42:31.224882 140539975640960 estimator.py:2049] Saving dict for global step 3966: DetectionBoxes_Precision/mAP = 0.6015867, DetectionBoxes_Precision/mAP (large) = 0.6043433, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9324607, DetectionBoxes_Precision/mAP@.75IOU = 0.6843519, DetectionBoxes_Recall/AR@1 = 0.52678955, DetectionBoxes_Recall/AR@10 = 0.68040866, DetectionBoxes_Recall/AR@100 = 0.68353367, DetectionBoxes_Recall/AR@100 (large) = 0.683117, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.9768953, Loss/localization_loss = 0.41501862, Loss/regularization_loss = 0.3515936, Loss/total_loss = 3.7435076, global_step = 3966, learning_rate = 0.004, loss = 3.7435076\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 3966: /content/gdrive/My Drive/capstone/model/model.ckpt-3966\n","I0613 00:42:31.233304 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 3966: /content/gdrive/My Drive/capstone/model/model.ckpt-3966\n","INFO:tensorflow:global_step/sec: 0.200825\n","I0613 00:45:13.485044 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.200825\n","INFO:tensorflow:loss = 2.3127425, step = 4001 (497.945 sec)\n","I0613 00:45:13.486737 140539975640960 basic_session_run_hooks.py:260] loss = 2.3127425, step = 4001 (497.945 sec)\n","INFO:tensorflow:Saving checkpoints for 4089 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 00:52:06.574652 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4089 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd170ae88d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 00:52:09.630527 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd170ae88d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170c4bbf8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 00:52:09.806289 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd170c4bbf8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 00:52:10.270863 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:52:12.549478 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:52:12.581966 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:52:12.613810 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:52:12.646018 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:52:12.678031 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 00:52:12.710508 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 00:52:15.722809 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 00:52:15.723108 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 00:52:15.723396 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 00:52:15.723575 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 00:52:15.723806 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 00:52:15.723969 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 00:52:15.724213 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 00:52:15.724375 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 00:52:15.724622 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 00:52:15.724781 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 00:52:15.725012 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 00:52:15.725166 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 00:52:15.725415 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 00:52:15.725594 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 00:52:15.725855 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 00:52:15.726038 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 00:52:15.726268 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 00:52:15.726430 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 00:52:15.726692 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 00:52:15.726850 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 00:52:15.727089 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 00:52:15.727241 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 00:52:15.727502 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 00:52:15.727674 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 00:52:15.727898 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 00:52:15.728047 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 00:52:15.728308 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 00:52:15.728501 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 00:52:15.728742 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 00:52:15.728890 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 00:52:15.729138 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 00:52:15.729293 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 00:52:15.729567 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 00:52:15.729783 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 00:52:15.730020 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 00:52:15.730183 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 00:52:15.730362 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 00:52:15.730533 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 00:52:15.730683 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 00:52:15.730824 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 00:52:15.730966 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 00:52:15.731111 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 00:52:15.731256 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 00:52:16.801249 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T00:52:16Z\n","I0613 00:52:16.824845 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T00:52:16Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 00:52:17.751526 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4089\n","I0613 00:52:17.755181 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4089\n","INFO:tensorflow:Running local_init_op.\n","I0613 00:52:19.313748 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 00:52:19.537168 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 00:52:31.015808 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 00:52:31.016434 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 00:52:31.020647 140537680893696 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.58s).\n","Accumulating evaluation results...\n","DONE (t=0.14s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.650\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.963\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.765\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.652\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.533\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.700\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.703\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.706\n","INFO:tensorflow:Finished evaluation at 2020-06-13-00:52:32\n","I0613 00:52:32.411779 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-00:52:32\n","INFO:tensorflow:Saving dict for global step 4089: DetectionBoxes_Precision/mAP = 0.6501912, DetectionBoxes_Precision/mAP (large) = 0.65216553, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9631085, DetectionBoxes_Precision/mAP@.75IOU = 0.7650529, DetectionBoxes_Recall/AR@1 = 0.5329861, DetectionBoxes_Recall/AR@10 = 0.69969285, DetectionBoxes_Recall/AR@100 = 0.70281786, DetectionBoxes_Recall/AR@100 (large) = 0.7057345, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7886996, Loss/localization_loss = 0.43674257, Loss/regularization_loss = 0.35178563, Loss/total_loss = 3.5772288, global_step = 4089, learning_rate = 0.004, loss = 3.5772288\n","I0613 00:52:32.412072 140539975640960 estimator.py:2049] Saving dict for global step 4089: DetectionBoxes_Precision/mAP = 0.6501912, DetectionBoxes_Precision/mAP (large) = 0.65216553, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9631085, DetectionBoxes_Precision/mAP@.75IOU = 0.7650529, DetectionBoxes_Recall/AR@1 = 0.5329861, DetectionBoxes_Recall/AR@10 = 0.69969285, DetectionBoxes_Recall/AR@100 = 0.70281786, DetectionBoxes_Recall/AR@100 (large) = 0.7057345, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7886996, Loss/localization_loss = 0.43674257, Loss/regularization_loss = 0.35178563, Loss/total_loss = 3.5772288, global_step = 4089, learning_rate = 0.004, loss = 3.5772288\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4089: /content/gdrive/My Drive/capstone/model/model.ckpt-4089\n","I0613 00:52:32.420017 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4089: /content/gdrive/My Drive/capstone/model/model.ckpt-4089\n","INFO:tensorflow:global_step/sec: 0.202091\n","I0613 00:53:28.312083 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.202091\n","INFO:tensorflow:loss = 2.2701087, step = 4101 (494.826 sec)\n","I0613 00:53:28.313076 140539975640960 basic_session_run_hooks.py:260] loss = 2.2701087, step = 4101 (494.826 sec)\n","INFO:tensorflow:global_step/sec: 0.210255\n","I0613 01:01:23.924861 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.210255\n","INFO:tensorflow:loss = 1.6827936, step = 4201 (475.614 sec)\n","I0613 01:01:23.926662 140539975640960 basic_session_run_hooks.py:260] loss = 1.6827936, step = 4201 (475.614 sec)\n","INFO:tensorflow:Saving checkpoints for 4210 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 01:02:09.162244 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4210 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd174951c18>> could not be transformed and will be 
executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0613 01:02:12.280693 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd174951c18>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1753f6950> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 01:02:12.471026 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1753f6950> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 01:02:12.923725 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:02:15.199669 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:02:15.232567 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:02:15.264347 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:02:15.296433 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:02:15.330184 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:02:15.363101 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 01:02:17.755066 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 01:02:17.755382 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 01:02:17.755661 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 01:02:17.755840 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 01:02:17.756086 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 01:02:17.756270 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 01:02:17.756520 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 01:02:17.756691 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 01:02:17.756934 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 01:02:17.757093 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 01:02:17.757327 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 01:02:17.757499 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 01:02:17.757735 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 01:02:17.757915 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 01:02:17.758150 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 01:02:17.758330 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 01:02:17.758578 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 01:02:17.758752 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 01:02:17.758993 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 01:02:17.759166 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 01:02:17.759397 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 01:02:17.759569 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 01:02:17.759834 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 01:02:17.759999 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 01:02:17.760235 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 01:02:17.760391 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 01:02:17.760637 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 01:02:17.760798 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 01:02:17.761051 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 01:02:17.761217 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 01:02:17.761472 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 01:02:17.761647 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 01:02:17.761895 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 01:02:17.762056 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 01:02:17.762300 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 01:02:17.762465 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 01:02:17.762617 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 01:02:17.762767 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 01:02:17.762924 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 01:02:17.763075 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 01:02:17.763224 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 01:02:17.763373 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 01:02:17.763533 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 01:02:18.784631 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T01:02:18Z\n","I0613 01:02:18.802727 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T01:02:18Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 01:02:19.709254 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4210\n","I0613 01:02:19.713728 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4210\n","INFO:tensorflow:Running local_init_op.\n","I0613 01:02:21.341153 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 01:02:21.555336 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 01:02:33.032748 140537689286400 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 01:02:33.034375 140537689286400 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0613 01:02:33.040087 140537689286400 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.67s).\n","Accumulating evaluation results...\n","DONE (t=0.11s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.664\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.972\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.833\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.668\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.536\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.704\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.705\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.400\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.710\n","INFO:tensorflow:Finished evaluation at 2020-06-13-01:02:34\n","I0613 01:02:34.326726 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-01:02:34\n","INFO:tensorflow:Saving dict for global step 4210: DetectionBoxes_Precision/mAP = 0.6636982, DetectionBoxes_Precision/mAP (large) = 0.66844136, DetectionBoxes_Precision/mAP (medium) = 0.4, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.97243315, DetectionBoxes_Precision/mAP@.75IOU = 0.8333635, DetectionBoxes_Recall/AR@1 = 0.53609776, DetectionBoxes_Recall/AR@10 = 0.7036458, DetectionBoxes_Recall/AR@100 = 0.70460737, DetectionBoxes_Recall/AR@100 (large) = 0.70971155, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.643924, Loss/localization_loss = 0.39165854, Loss/regularization_loss = 0.35188276, Loss/total_loss = 3.3874655, global_step = 4210, learning_rate = 0.004, loss = 3.3874655\n","I0613 01:02:34.327042 140539975640960 estimator.py:2049] Saving dict for global step 4210: DetectionBoxes_Precision/mAP = 0.6636982, DetectionBoxes_Precision/mAP (large) = 0.66844136, DetectionBoxes_Precision/mAP (medium) = 0.4, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.97243315, DetectionBoxes_Precision/mAP@.75IOU = 0.8333635, DetectionBoxes_Recall/AR@1 = 0.53609776, DetectionBoxes_Recall/AR@10 = 0.7036458, DetectionBoxes_Recall/AR@100 = 0.70460737, DetectionBoxes_Recall/AR@100 (large) = 0.70971155, DetectionBoxes_Recall/AR@100 (medium) = 0.4, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.643924, Loss/localization_loss = 0.39165854, Loss/regularization_loss = 0.35188276, Loss/total_loss = 3.3874655, global_step = 4210, learning_rate = 0.004, loss = 3.3874655\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4210: /content/gdrive/My Drive/capstone/model/model.ckpt-4210\n","I0613 01:02:34.335596 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4210: /content/gdrive/My Drive/capstone/model/model.ckpt-4210\n","INFO:tensorflow:global_step/sec: 0.198459\n","I0613 01:09:47.806240 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.198459\n","INFO:tensorflow:loss = 2.044566, step = 4301 (503.881 sec)\n","I0613 01:09:47.807726 140539975640960 basic_session_run_hooks.py:260] loss = 2.044566, step = 4301 (503.881 sec)\n","INFO:tensorflow:Saving checkpoints for 4330 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 01:12:10.899512 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4330 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd172688748>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 01:12:13.940040 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd172688748>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd175576730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 01:12:14.115527 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd175576730> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 01:12:14.591828 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:12:17.578405 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:12:17.611031 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:12:17.642629 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:12:17.677421 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:12:17.709405 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:12:17.741007 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 01:12:20.113143 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 01:12:20.113504 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 01:12:20.113783 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 01:12:20.114000 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 01:12:20.114258 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 01:12:20.114426 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 01:12:20.114692 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 01:12:20.114877 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 01:12:20.115126 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 01:12:20.115291 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 01:12:20.115548 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 01:12:20.115720 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 01:12:20.115971 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 01:12:20.116136 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 01:12:20.116392 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 01:12:20.116575 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 01:12:20.116829 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 01:12:20.116995 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 01:12:20.117241 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 01:12:20.117402 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 01:12:20.117661 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 01:12:20.117831 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 01:12:20.118077 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 01:12:20.118240 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 01:12:20.118503 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 01:12:20.118671 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 01:12:20.118924 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 01:12:20.119087 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 01:12:20.119330 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 01:12:20.119505 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 01:12:20.119754 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 01:12:20.119926 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 01:12:20.120178 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 01:12:20.120343 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 01:12:20.120620 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 01:12:20.120787 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 01:12:20.120949 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 01:12:20.121104 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 01:12:20.121259 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 01:12:20.121414 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 01:12:20.121580 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 01:12:20.121734 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 01:12:20.121891 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 01:12:21.144781 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T01:12:21Z\n","I0613 01:12:21.164944 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T01:12:21Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 01:12:22.107843 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4330\n","I0613 01:12:22.111573 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4330\n","INFO:tensorflow:Running local_init_op.\n","I0613 01:12:23.792178 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 01:12:24.036783 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 01:12:35.541528 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 01:12:35.542347 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 01:12:35.546686 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.53s).\n","Accumulating evaluation results...\n","DONE (t=0.18s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.674\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.975\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.765\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.475\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.677\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.557\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.715\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.718\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.700\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.718\n","INFO:tensorflow:Finished evaluation at 2020-06-13-01:12:36\n","I0613 01:12:36.815277 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-01:12:36\n","INFO:tensorflow:Saving dict for global step 4330: DetectionBoxes_Precision/mAP = 0.6739168, DetectionBoxes_Precision/mAP (large) = 0.67669356, DetectionBoxes_Precision/mAP (medium) = 0.475, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9753917, DetectionBoxes_Precision/mAP@.75IOU = 0.76505125, DetectionBoxes_Recall/AR@1 = 0.5569845, DetectionBoxes_Recall/AR@10 = 0.71455663, DetectionBoxes_Recall/AR@100 = 0.71768165, DetectionBoxes_Recall/AR@100 (large) = 0.71768165, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7683272, Loss/localization_loss = 0.3681706, Loss/regularization_loss = 0.3519732, Loss/total_loss = 3.4884715, global_step = 4330, learning_rate = 0.004, loss = 3.4884715\n","I0613 01:12:36.815605 140539975640960 estimator.py:2049] Saving dict for global step 4330: DetectionBoxes_Precision/mAP = 0.6739168, DetectionBoxes_Precision/mAP (large) = 0.67669356, DetectionBoxes_Precision/mAP (medium) = 0.475, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9753917, DetectionBoxes_Precision/mAP@.75IOU = 0.76505125, DetectionBoxes_Recall/AR@1 = 0.5569845, DetectionBoxes_Recall/AR@10 = 0.71455663, DetectionBoxes_Recall/AR@100 = 0.71768165, DetectionBoxes_Recall/AR@100 (large) = 0.71768165, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7683272, Loss/localization_loss = 0.3681706, Loss/regularization_loss = 0.3519732, Loss/total_loss = 3.4884715, global_step = 4330, learning_rate = 0.004, loss = 3.4884715\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4330: /content/gdrive/My Drive/capstone/model/model.ckpt-4330\n","I0613 01:12:36.823894 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4330: /content/gdrive/My Drive/capstone/model/model.ckpt-4330\n","INFO:tensorflow:global_step/sec: 0.195772\n","I0613 01:18:18.603592 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.195772\n","INFO:tensorflow:loss = 1.769833, step = 4401 (510.798 sec)\n","I0613 01:18:18.605315 140539975640960 basic_session_run_hooks.py:260] loss = 1.769833, step = 4401 (510.798 sec)\n","INFO:tensorflow:Saving checkpoints for 4450 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 01:22:13.568192 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4450 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16ed3c2b0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 01:22:16.707942 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16ed3c2b0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16f38d7b8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 01:22:16.903039 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd16f38d7b8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 01:22:17.392889 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:22:19.693163 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:22:19.725902 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:22:19.757496 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:22:19.790438 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:22:19.822629 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:22:19.854192 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 01:22:22.197514 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 01:22:22.197880 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 01:22:22.198215 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 01:22:22.198459 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 01:22:22.198784 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 01:22:22.199005 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 01:22:22.199299 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 01:22:22.199522 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 01:22:22.199812 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 01:22:22.200020 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 01:22:22.200307 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 01:22:22.200526 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 01:22:22.200812 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 01:22:22.201020 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 01:22:22.201299 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 01:22:22.201510 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 01:22:22.201790 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 01:22:22.201989 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 01:22:22.202265 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 01:22:22.202483 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 01:22:22.202788 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 01:22:22.202996 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 01:22:22.203280 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 01:22:22.203494 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 01:22:22.203776 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 01:22:22.203985 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 01:22:22.204268 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 01:22:22.204479 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 01:22:22.204761 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 01:22:22.204957 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 01:22:22.205240 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 01:22:22.205435 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 01:22:22.205734 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 01:22:22.205930 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 01:22:22.206219 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 01:22:22.206405 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 01:22:22.206635 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 01:22:22.206837 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 01:22:22.207037 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 01:22:22.207237 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 01:22:22.207434 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 01:22:22.207647 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 01:22:22.207843 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 01:22:23.225003 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T01:22:23Z\n","I0613 01:22:23.242846 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T01:22:23Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 01:22:24.710394 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4450\n","I0613 01:22:24.714285 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4450\n","INFO:tensorflow:Running local_init_op.\n","I0613 01:22:26.370546 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 01:22:26.594413 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 01:22:38.302221 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 01:22:38.302787 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 01:22:38.307589 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.39s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.610\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.962\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.633\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.612\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.513\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.657\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.661\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.663\n","INFO:tensorflow:Finished evaluation at 2020-06-13-01:22:39\n","I0613 01:22:39.623939 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-01:22:39\n","INFO:tensorflow:Saving dict for global step 4450: DetectionBoxes_Precision/mAP = 0.6099259, DetectionBoxes_Precision/mAP (large) = 0.61242396, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9623202, DetectionBoxes_Precision/mAP@.75IOU = 0.6332801, DetectionBoxes_Recall/AR@1 = 0.51254004, DetectionBoxes_Recall/AR@10 = 0.6567174, DetectionBoxes_Recall/AR@100 = 0.6614049, DetectionBoxes_Recall/AR@100 (large) = 0.66275907, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.8759053, Loss/localization_loss = 0.4792715, Loss/regularization_loss = 0.35210097, Loss/total_loss = 3.7072794, global_step = 4450, learning_rate = 0.004, loss = 3.7072794\n","I0613 01:22:39.624236 140539975640960 estimator.py:2049] Saving dict for global step 4450: DetectionBoxes_Precision/mAP = 0.6099259, DetectionBoxes_Precision/mAP (large) = 0.61242396, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9623202, DetectionBoxes_Precision/mAP@.75IOU = 0.6332801, DetectionBoxes_Recall/AR@1 = 0.51254004, DetectionBoxes_Recall/AR@10 = 0.6567174, DetectionBoxes_Recall/AR@100 = 0.6614049, DetectionBoxes_Recall/AR@100 (large) = 0.66275907, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.8759053, Loss/localization_loss = 0.4792715, Loss/regularization_loss = 0.35210097, Loss/total_loss = 3.7072794, global_step = 4450, learning_rate = 0.004, loss = 3.7072794\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4450: /content/gdrive/My Drive/capstone/model/model.ckpt-4450\n","I0613 01:22:39.632149 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4450: /content/gdrive/My Drive/capstone/model/model.ckpt-4450\n","INFO:tensorflow:global_step/sec: 0.196878\n","I0613 01:26:46.532227 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.196878\n","INFO:tensorflow:loss = 2.0564628, step = 4501 (507.929 sec)\n","I0613 01:26:46.533823 140539975640960 basic_session_run_hooks.py:260] loss = 2.0564628, step = 4501 (507.929 sec)\n","INFO:tensorflow:Saving checkpoints for 4568 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 01:32:13.824784 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4568 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd178e46668>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 01:32:16.900372 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd178e46668>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd172003840> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 01:32:17.077220 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd172003840> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 01:32:17.543028 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:32:19.801388 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:32:19.833863 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:32:19.865288 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:32:19.901079 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:32:19.935483 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:32:19.970978 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 01:32:22.330486 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 01:32:22.330812 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 01:32:22.331085 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 01:32:22.331257 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 01:32:22.331519 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 01:32:22.331700 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 01:32:22.331951 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 01:32:22.332116 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 01:32:22.332355 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 01:32:22.332553 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 01:32:22.332796 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 01:32:22.332965 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 01:32:22.333205 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 01:32:22.333362 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 01:32:22.333613 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 01:32:22.333777 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 01:32:22.334021 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 01:32:22.334182 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 01:32:22.334423 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 01:32:22.334600 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 01:32:22.334842 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 01:32:22.335006 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 01:32:22.335246 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 01:32:22.335405 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 01:32:22.335659 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 01:32:22.335822 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 01:32:22.336063 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 01:32:22.336223 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 01:32:22.336477 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 01:32:22.336638 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 01:32:22.336882 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 01:32:22.337042 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 01:32:22.337281 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 01:32:22.337450 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 01:32:22.337700 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 01:32:22.337858 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 01:32:22.338010 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 01:32:22.338162 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 01:32:22.338310 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 01:32:22.338478 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 01:32:22.338634 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 01:32:22.338783 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 01:32:22.338938 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 01:32:23.386868 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T01:32:23Z\n","I0613 01:32:23.404831 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T01:32:23Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 01:32:24.302527 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4568\n","I0613 01:32:24.307304 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4568\n","INFO:tensorflow:Running local_init_op.\n","I0613 01:32:26.005589 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 01:32:26.239864 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 01:32:37.590567 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 01:32:37.591610 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0613 01:32:37.597305 140537697679104 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.56s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.647\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.954\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.676\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.700\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.649\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.540\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.705\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.707\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.700\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.707\n","INFO:tensorflow:Finished evaluation at 2020-06-13-01:32:39\n","I0613 01:32:39.021183 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-01:32:39\n","INFO:tensorflow:Saving dict for global step 4568: DetectionBoxes_Precision/mAP = 0.6466451, DetectionBoxes_Precision/mAP (large) = 0.6491518, DetectionBoxes_Precision/mAP (medium) = 0.7, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9536217, DetectionBoxes_Precision/mAP@.75IOU = 0.67600936, DetectionBoxes_Recall/AR@1 = 0.53999734, DetectionBoxes_Recall/AR@10 = 0.70538193, DetectionBoxes_Recall/AR@100 = 0.70694447, DetectionBoxes_Recall/AR@100 (large) = 0.7073611, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.079565, Loss/localization_loss = 0.40191397, Loss/regularization_loss = 0.35220334, Loss/total_loss = 3.8336825, global_step = 4568, learning_rate = 0.004, loss = 3.8336825\n","I0613 01:32:39.021556 140539975640960 estimator.py:2049] Saving dict for global step 4568: DetectionBoxes_Precision/mAP = 0.6466451, DetectionBoxes_Precision/mAP (large) = 0.6491518, DetectionBoxes_Precision/mAP (medium) = 0.7, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9536217, DetectionBoxes_Precision/mAP@.75IOU = 0.67600936, DetectionBoxes_Recall/AR@1 = 0.53999734, DetectionBoxes_Recall/AR@10 = 0.70538193, DetectionBoxes_Recall/AR@100 = 0.70694447, DetectionBoxes_Recall/AR@100 (large) = 0.7073611, DetectionBoxes_Recall/AR@100 (medium) = 0.7, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.079565, Loss/localization_loss = 0.40191397, Loss/regularization_loss = 0.35220334, Loss/total_loss = 3.8336825, global_step = 4568, learning_rate = 0.004, loss = 3.8336825\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4568: /content/gdrive/My Drive/capstone/model/model.ckpt-4568\n","I0613 01:32:39.031062 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4568: /content/gdrive/My Drive/capstone/model/model.ckpt-4568\n","INFO:tensorflow:global_step/sec: 0.193074\n","I0613 01:35:24.469023 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.193074\n","INFO:tensorflow:loss = 1.6711236, step = 4601 (517.937 sec)\n","I0613 01:35:24.470544 140539975640960 basic_session_run_hooks.py:260] loss = 1.6711236, step = 4601 (517.937 sec)\n","INFO:tensorflow:Saving checkpoints for 4684 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 01:42:14.584321 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4684 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16fe9a9e8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 01:42:17.701690 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd16fe9a9e8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171c52e18> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 01:42:17.879747 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd171c52e18> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 01:42:18.355019 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:42:20.608655 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:42:20.642066 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:42:20.674436 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:42:20.707678 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:42:20.739155 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:42:20.771346 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 01:42:23.756806 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 01:42:23.757167 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 01:42:23.757463 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 01:42:23.757683 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 01:42:23.757945 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 01:42:23.758146 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 01:42:23.758409 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 01:42:23.758597 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 01:42:23.758854 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 01:42:23.759038 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 01:42:23.759295 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 01:42:23.759473 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 01:42:23.759748 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 01:42:23.759927 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 01:42:23.760178 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 01:42:23.760343 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 01:42:23.760606 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 01:42:23.760800 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 01:42:23.761059 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 01:42:23.761232 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 01:42:23.761507 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 01:42:23.761694 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 01:42:23.761944 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 01:42:23.762110 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 01:42:23.762354 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 01:42:23.762537 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 01:42:23.762789 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 01:42:23.762966 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 01:42:23.763211 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 01:42:23.763392 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 01:42:23.763699 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 01:42:23.763900 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 01:42:23.764156 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 01:42:23.764322 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 01:42:23.764593 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 01:42:23.764791 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 01:42:23.764975 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 01:42:23.765133 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 01:42:23.765281 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 01:42:23.765429 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 01:42:23.765598 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 01:42:23.765748 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 01:42:23.765910 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 01:42:24.882966 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T01:42:24Z\n","I0613 01:42:24.907122 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T01:42:24Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 01:42:25.864846 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4684\n","I0613 01:42:25.869401 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4684\n","INFO:tensorflow:Running local_init_op.\n","I0613 01:42:27.532142 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 01:42:27.762967 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 01:42:39.428156 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 01:42:39.428731 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 01:42:39.433001 140537697679104 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.36s).\n","Accumulating evaluation results...\n","DONE (t=0.10s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.601\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.955\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.652\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.605\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.650\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.658\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.661\n","INFO:tensorflow:Finished evaluation at 2020-06-13-01:42:40\n","I0613 01:42:40.682591 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-01:42:40\n","INFO:tensorflow:Saving dict for global step 4684: DetectionBoxes_Precision/mAP = 0.600907, DetectionBoxes_Precision/mAP (large) = 0.60502875, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9552948, DetectionBoxes_Precision/mAP@.75IOU = 0.6519981, DetectionBoxes_Recall/AR@1 = 0.50040066, DetectionBoxes_Recall/AR@10 = 0.6495727, DetectionBoxes_Recall/AR@100 = 0.6583467, DetectionBoxes_Recall/AR@100 (large) = 0.661055, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.083279, Loss/localization_loss = 0.44912598, Loss/regularization_loss = 0.35229132, Loss/total_loss = 3.8846955, global_step = 4684, learning_rate = 0.004, loss = 3.8846955\n","I0613 01:42:40.682889 140539975640960 estimator.py:2049] Saving dict for global step 4684: DetectionBoxes_Precision/mAP = 0.600907, DetectionBoxes_Precision/mAP (large) = 0.60502875, DetectionBoxes_Precision/mAP (medium) = 0.5, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9552948, DetectionBoxes_Precision/mAP@.75IOU = 0.6519981, DetectionBoxes_Recall/AR@1 = 0.50040066, DetectionBoxes_Recall/AR@10 = 0.6495727, DetectionBoxes_Recall/AR@100 = 0.6583467, DetectionBoxes_Recall/AR@100 (large) = 0.661055, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 3.083279, Loss/localization_loss = 0.44912598, Loss/regularization_loss = 0.35229132, Loss/total_loss = 3.8846955, global_step = 4684, learning_rate = 0.004, loss = 3.8846955\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4684: /content/gdrive/My Drive/capstone/model/model.ckpt-4684\n","I0613 01:42:40.691093 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4684: /content/gdrive/My Drive/capstone/model/model.ckpt-4684\n","INFO:tensorflow:global_step/sec: 0.192834\n","I0613 01:44:03.050549 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.192834\n","INFO:tensorflow:loss = 2.3364322, step = 4701 (518.581 sec)\n","I0613 01:44:03.051745 140539975640960 basic_session_run_hooks.py:260] loss = 2.3364322, step = 4701 (518.581 sec)\n","INFO:tensorflow:Saving checkpoints for 4801 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 01:52:14.984073 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4801 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1748c10b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 01:52:17.981210 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1748c10b8>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1712296a8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 01:52:18.156770 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd1712296a8> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 01:52:18.623518 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:52:21.076522 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:52:21.111878 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:52:21.145683 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:52:21.180593 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:52:21.212254 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 01:52:21.243856 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 01:52:23.595343 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 01:52:23.595728 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 01:52:23.596009 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 01:52:23.596185 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 01:52:23.596429 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 01:52:23.596627 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 01:52:23.596873 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 01:52:23.597031 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 01:52:23.597266 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 01:52:23.597450 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 01:52:23.597695 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 01:52:23.597877 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 01:52:23.598117 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 01:52:23.598288 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 01:52:23.598535 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 01:52:23.598714 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 01:52:23.598961 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 01:52:23.599127 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 01:52:23.599366 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 01:52:23.599535 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 01:52:23.599778 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 01:52:23.599960 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 01:52:23.600234 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 01:52:23.600419 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 01:52:23.600681 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 01:52:23.600850 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 01:52:23.601092 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 01:52:23.601253 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 01:52:23.601506 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 01:52:23.601667 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 01:52:23.601911 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 01:52:23.602068 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 01:52:23.602301 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 01:52:23.602472 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 01:52:23.602719 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 01:52:23.602877 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 01:52:23.603028 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 01:52:23.603180 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 01:52:23.603328 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 01:52:23.603488 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 01:52:23.603636 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 01:52:23.603779 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 01:52:23.603953 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 01:52:24.609214 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T01:52:24Z\n","I0613 01:52:24.626972 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T01:52:24Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 01:52:25.576785 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4801\n","I0613 01:52:25.581521 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4801\n","INFO:tensorflow:Running local_init_op.\n","I0613 01:52:27.209244 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 01:52:27.419112 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 01:52:38.758846 140537680893696 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 01:52:38.759568 140537680893696 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 01:52:38.762781 140537680893696 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.48s).\n","Accumulating evaluation results...\n","DONE (t=0.11s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.631\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.941\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.679\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.450\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.636\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.550\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.687\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.699\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.500\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.703\n","INFO:tensorflow:Finished evaluation at 2020-06-13-01:52:40\n","I0613 01:52:40.110766 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-01:52:40\n","INFO:tensorflow:Saving dict for global step 4801: DetectionBoxes_Precision/mAP = 0.63134617, DetectionBoxes_Precision/mAP (large) = 0.636282, DetectionBoxes_Precision/mAP (medium) = 0.45, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.9405865, DetectionBoxes_Precision/mAP@.75IOU = 0.6785169, DetectionBoxes_Recall/AR@1 = 0.5495059, DetectionBoxes_Recall/AR@10 = 0.6869525, DetectionBoxes_Recall/AR@100 = 0.69933224, DetectionBoxes_Recall/AR@100 (large) = 0.70276976, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.838252, Loss/localization_loss = 0.3848492, Loss/regularization_loss = 0.35238832, Loss/total_loss = 3.5754895, global_step = 4801, learning_rate = 0.004, loss = 3.5754895\n","I0613 01:52:40.111075 140539975640960 estimator.py:2049] Saving dict for global step 4801: DetectionBoxes_Precision/mAP = 0.63134617, DetectionBoxes_Precision/mAP (large) = 0.636282, DetectionBoxes_Precision/mAP (medium) = 0.45, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.9405865, DetectionBoxes_Precision/mAP@.75IOU = 0.6785169, DetectionBoxes_Recall/AR@1 = 0.5495059, DetectionBoxes_Recall/AR@10 = 0.6869525, DetectionBoxes_Recall/AR@100 = 0.69933224, DetectionBoxes_Recall/AR@100 (large) = 0.70276976, DetectionBoxes_Recall/AR@100 (medium) = 0.5, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.838252, Loss/localization_loss = 0.3848492, Loss/regularization_loss = 0.35238832, Loss/total_loss = 3.5754895, global_step = 4801, learning_rate = 0.004, loss = 3.5754895\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4801: /content/gdrive/My Drive/capstone/model/model.ckpt-4801\n","I0613 01:52:40.120543 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4801: /content/gdrive/My Drive/capstone/model/model.ckpt-4801\n","INFO:tensorflow:global_step/sec: 0.193397\n","I0613 01:52:40.122792 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.193397\n","INFO:tensorflow:loss = 1.9404601, step = 4801 (517.072 sec)\n","I0613 01:52:40.123740 140539975640960 basic_session_run_hooks.py:260] loss = 1.9404601, step = 4801 (517.072 sec)\n","INFO:tensorflow:global_step/sec: 0.20336\n","I0613 02:00:51.861541 140539975640960 basic_session_run_hooks.py:692] global_step/sec: 0.20336\n","INFO:tensorflow:loss = 1.7316799, step = 4901 (491.740 sec)\n","I0613 02:00:51.863529 140539975640960 basic_session_run_hooks.py:260] loss = 1.7316799, step = 4901 (491.740 sec)\n","INFO:tensorflow:Saving checkpoints for 4918 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 02:02:15.099362 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 4918 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171d668d0>> could not be transformed and will be executed 
as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","W0613 02:02:18.289178 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd171d668d0>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd175246b70> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 02:02:18.475340 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd175246b70> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 02:02:18.939494 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:02:21.250458 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:02:21.282842 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:02:21.315373 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:02:21.349598 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:02:21.382327 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:02:21.415593 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 02:02:24.513725 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 02:02:24.514120 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 02:02:24.514407 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 02:02:24.514609 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 02:02:24.514886 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 02:02:24.515079 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 02:02:24.515341 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 02:02:24.515528 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 02:02:24.515794 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 02:02:24.515973 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 02:02:24.516231 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 02:02:24.516401 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 02:02:24.516694 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 02:02:24.516877 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 02:02:24.517137 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 02:02:24.517309 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 02:02:24.517581 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 02:02:24.517753 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 02:02:24.518017 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 02:02:24.518190 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 02:02:24.518467 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 02:02:24.518641 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 02:02:24.518903 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 02:02:24.519075 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 02:02:24.519335 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 02:02:24.519520 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 02:02:24.519784 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 02:02:24.519962 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 02:02:24.520220 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 02:02:24.520391 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 02:02:24.520693 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 02:02:24.520896 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 02:02:24.521162 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 02:02:24.521341 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 02:02:24.521620 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 02:02:24.521788 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 02:02:24.521953 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 02:02:24.522111 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 02:02:24.522266 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 02:02:24.522420 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 02:02:24.522591 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 02:02:24.522751 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 02:02:24.522915 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 02:02:25.625515 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T02:02:25Z\n","I0613 02:02:25.648165 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T02:02:25Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 02:02:26.507155 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4918\n","I0613 02:02:26.511136 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-4918\n","INFO:tensorflow:Running local_init_op.\n","I0613 02:02:28.192814 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 02:02:28.432521 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 02:02:42.552273 140537697679104 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 02:02:42.553024 140537697679104 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.01s)\n","I0613 02:02:42.558358 140537697679104 coco_tools.py:137] DONE (t=0.01s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.70s).\n","Accumulating evaluation results...\n","DONE (t=0.13s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.601\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.962\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.741\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.602\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.507\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.663\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.664\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.665\n","INFO:tensorflow:Finished evaluation at 2020-06-13-02:02:43\n","I0613 02:02:43.880545 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-02:02:43\n","INFO:tensorflow:Saving dict for global step 4918: DetectionBoxes_Precision/mAP = 0.60108954, DetectionBoxes_Precision/mAP (large) = 0.60209024, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.96228534, DetectionBoxes_Precision/mAP@.75IOU = 0.7410267, DetectionBoxes_Recall/AR@1 = 0.5071848, DetectionBoxes_Recall/AR@10 = 0.66295403, DetectionBoxes_Recall/AR@100 = 0.6639156, DetectionBoxes_Recall/AR@100 (large) = 0.6646448, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.5835814, Loss/localization_loss = 0.42062512, Loss/regularization_loss = 0.35248178, Loss/total_loss = 3.3566885, global_step = 4918, learning_rate = 0.004, loss = 3.3566885\n","I0613 02:02:43.880838 140539975640960 estimator.py:2049] Saving dict for global step 4918: DetectionBoxes_Precision/mAP = 0.60108954, DetectionBoxes_Precision/mAP (large) = 0.60209024, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.96228534, DetectionBoxes_Precision/mAP@.75IOU = 0.7410267, DetectionBoxes_Recall/AR@1 = 0.5071848, DetectionBoxes_Recall/AR@10 = 0.66295403, DetectionBoxes_Recall/AR@100 = 0.6639156, DetectionBoxes_Recall/AR@100 (large) = 0.6646448, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.5835814, Loss/localization_loss = 0.42062512, Loss/regularization_loss = 0.35248178, Loss/total_loss = 3.3566885, global_step = 4918, learning_rate = 0.004, loss = 3.3566885\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 4918: /content/gdrive/My Drive/capstone/model/model.ckpt-4918\n","I0613 02:02:43.890902 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 4918: /content/gdrive/My Drive/capstone/model/model.ckpt-4918\n","INFO:tensorflow:Saving checkpoints for 5000 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","I0613 02:09:28.928679 140539975640960 basic_session_run_hooks.py:606] Saving checkpoints for 5000 into /content/gdrive/My Drive/capstone/model/model.ckpt.\n","INFO:tensorflow:Skip the current checkpoint eval due to throttle secs (600 secs).\n","I0613 02:09:31.923670 140539975640960 training.py:527] Skip the current checkpoint eval due to throttle secs (600 secs).\n","WARNING:tensorflow:Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1708c9828>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n","W0613 02:09:31.999753 140539975640960 ag_logging.py:146] Entity <bound method TfExampleDecoder.decode of <object_detection.data_decoders.tf_example_decoder.TfExampleDecoder object at 0x7fd1708c9828>> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n","WARNING:tensorflow:Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd176165620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n","W0613 02:09:32.177214 140539975640960 ag_logging.py:146] Entity <function eval_input.<locals>.transform_and_pad_input_data_fn at 0x7fd176165620> could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: Bad argument number for Name: 3, expecting 4\n","INFO:tensorflow:Calling model_fn.\n","I0613 02:09:32.636192 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:34.916401 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:34.948611 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:34.980591 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:35.014248 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:35.045720 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:35.077877 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 02:09:37.497082 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 02:09:37.497426 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 02:09:37.497739 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 02:09:37.497949 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 02:09:37.498219 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 02:09:37.498400 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 02:09:37.498677 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 02:09:37.498888 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 02:09:37.499154 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 02:09:37.499330 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 02:09:37.499609 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 02:09:37.499790 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 02:09:37.500059 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 02:09:37.500305 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 02:09:37.500599 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 02:09:37.500797 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 02:09:37.501070 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 02:09:37.501252 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 02:09:37.501525 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 02:09:37.501739 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 02:09:37.501997 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 02:09:37.502185 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 02:09:37.502461 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 02:09:37.502665 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 02:09:37.502926 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 02:09:37.503100 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 02:09:37.503356 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 02:09:37.503544 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 02:09:37.503805 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 02:09:37.503983 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 02:09:37.504239 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 02:09:37.504412 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 02:09:37.504683 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 02:09:37.504863 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 02:09:37.505124 140539975640960 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 02:09:37.505291 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 02:09:37.505469 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 02:09:37.505634 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 02:09:37.505796 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 02:09:37.505966 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 02:09:37.506128 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 02:09:37.506289 140539975640960 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 02:09:37.506464 140539975640960 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","INFO:tensorflow:Done calling model_fn.\n","I0613 02:09:38.528329 140539975640960 estimator.py:1150] Done calling model_fn.\n","INFO:tensorflow:Starting evaluation at 2020-06-13T02:09:38Z\n","I0613 02:09:38.546192 140539975640960 evaluation.py:255] Starting evaluation at 2020-06-13T02:09:38Z\n","INFO:tensorflow:Graph was finalized.\n","I0613 02:09:39.470078 140539975640960 monitored_session.py:240] Graph was finalized.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","I0613 02:09:39.474675 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","INFO:tensorflow:Running local_init_op.\n","I0613 02:09:41.128094 140539975640960 session_manager.py:500] Running local_init_op.\n","INFO:tensorflow:Done running local_init_op.\n","I0613 02:09:41.347579 140539975640960 session_manager.py:502] Done running local_init_op.\n","INFO:tensorflow:Performing evaluation on 54 images.\n","I0613 02:09:52.821998 140537672500992 coco_evaluation.py:236] Performing evaluation on 54 images.\n","creating index...\n","index created!\n","INFO:tensorflow:Loading and preparing annotation results...\n","I0613 02:09:52.822717 140537672500992 coco_tools.py:115] Loading and preparing annotation results...\n","INFO:tensorflow:DONE (t=0.00s)\n","I0613 02:09:52.827594 140537672500992 coco_tools.py:137] DONE (t=0.00s)\n","creating index...\n","index created!\n","Running per image evaluation...\n","Evaluate annotation type *bbox*\n","DONE (t=0.68s).\n","Accumulating evaluation results...\n","DONE (t=0.11s).\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.661\n"," Average Precision  (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.966\n"," Average Precision  (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.766\n"," Average Precision  (AP) 
@[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Precision  (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.663\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.550\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.709\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.709\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.600\n"," Average Recall     (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.711\n","INFO:tensorflow:Finished evaluation at 2020-06-13-02:09:54\n","I0613 02:09:54.170989 140539975640960 evaluation.py:275] Finished evaluation at 2020-06-13-02:09:54\n","INFO:tensorflow:Saving dict for global step 5000: DetectionBoxes_Precision/mAP = 0.6605925, DetectionBoxes_Precision/mAP (large) = 0.66296273, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, DetectionBoxes_Precision/mAP@.50IOU = 0.96608704, DetectionBoxes_Precision/mAP@.75IOU = 0.7657489, DetectionBoxes_Recall/AR@1 = 0.5500801, DetectionBoxes_Recall/AR@10 = 0.70897436, DetectionBoxes_Recall/AR@100 = 0.70897436, DetectionBoxes_Recall/AR@100 (large) = 0.71084934, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7408361, Loss/localization_loss = 0.35107684, Loss/regularization_loss = 0.3525518, Loss/total_loss = 3.4444637, global_step = 5000, learning_rate = 0.004, loss = 3.4444637\n","I0613 02:09:54.171283 140539975640960 estimator.py:2049] Saving dict for global step 5000: DetectionBoxes_Precision/mAP = 0.6605925, DetectionBoxes_Precision/mAP (large) = 0.66296273, DetectionBoxes_Precision/mAP (medium) = 0.6, DetectionBoxes_Precision/mAP (small) = -1.0, 
DetectionBoxes_Precision/mAP@.50IOU = 0.96608704, DetectionBoxes_Precision/mAP@.75IOU = 0.7657489, DetectionBoxes_Recall/AR@1 = 0.5500801, DetectionBoxes_Recall/AR@10 = 0.70897436, DetectionBoxes_Recall/AR@100 = 0.70897436, DetectionBoxes_Recall/AR@100 (large) = 0.71084934, DetectionBoxes_Recall/AR@100 (medium) = 0.6, DetectionBoxes_Recall/AR@100 (small) = -1.0, Loss/classification_loss = 2.7408361, Loss/localization_loss = 0.35107684, Loss/regularization_loss = 0.3525518, Loss/total_loss = 3.4444637, global_step = 5000, learning_rate = 0.004, loss = 3.4444637\n","INFO:tensorflow:Saving 'checkpoint_path' summary for global step 5000: /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","I0613 02:09:54.180180 140539975640960 estimator.py:2109] Saving 'checkpoint_path' summary for global step 5000: /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","INFO:tensorflow:Performing the final export in the end of training.\n","I0613 02:09:54.181497 140539975640960 exporter.py:410] Performing the final export in the end of training.\n","INFO:tensorflow:Calling model_fn.\n","I0613 02:09:54.452692 140539975640960 estimator.py:1148] Calling model_fn.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:57.595485 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:57.629324 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:57.663939 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:57.696666 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv 
before box predictor: 0\n","I0613 02:09:57.728228 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:09:57.759271 140539975640960 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:Done calling model_fn.\n","I0613 02:09:58.561459 140539975640960 estimator.py:1150] Done calling model_fn.\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/saved_model/signature_def_utils_impl.py:201: build_tensor_info (from tensorflow.python.saved_model.utils_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","This function will only be available through the v1 compatibility library as tf.compat.v1.saved_model.utils.build_tensor_info or tf.compat.v1.saved_model.build_tensor_info.\n","W0613 02:09:58.561756 140539975640960 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/saved_model/signature_def_utils_impl.py:201: build_tensor_info (from tensorflow.python.saved_model.utils_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","This function will only be available through the v1 compatibility library as tf.compat.v1.saved_model.utils.build_tensor_info or tf.compat.v1.saved_model.build_tensor_info.\n","INFO:tensorflow:Signatures INCLUDED in export for Classify: None\n","I0613 02:09:58.562437 140539975640960 export_utils.py:170] Signatures INCLUDED in export for Classify: None\n","INFO:tensorflow:Signatures INCLUDED in export for Regress: None\n","I0613 02:09:58.562578 140539975640960 export_utils.py:170] Signatures INCLUDED in export for Regress: None\n","INFO:tensorflow:Signatures INCLUDED in export for Predict: ['tensorflow/serving/predict', 'serving_default']\n","I0613 02:09:58.562644 140539975640960 export_utils.py:170] Signatures INCLUDED in export for Predict: 
['tensorflow/serving/predict', 'serving_default']\n","INFO:tensorflow:Signatures INCLUDED in export for Train: None\n","I0613 02:09:58.562704 140539975640960 export_utils.py:170] Signatures INCLUDED in export for Train: None\n","INFO:tensorflow:Signatures INCLUDED in export for Eval: None\n","I0613 02:09:58.562753 140539975640960 export_utils.py:170] Signatures INCLUDED in export for Eval: None\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","I0613 02:09:58.567910 140539975640960 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","INFO:tensorflow:Assets added to graph.\n","I0613 02:09:59.047980 140539975640960 builder_impl.py:665] Assets added to graph.\n","INFO:tensorflow:No assets to write.\n","I0613 02:09:59.048180 140539975640960 builder_impl.py:460] No assets to write.\n","INFO:tensorflow:SavedModel written to: /content/gdrive/My Drive/capstone/model/export/Servo/temp-b'1592014194'/saved_model.pb\n","I0613 02:09:59.962776 140539975640960 builder_impl.py:425] SavedModel written to: /content/gdrive/My Drive/capstone/model/export/Servo/temp-b'1592014194'/saved_model.pb\n","INFO:tensorflow:Loss for final step: 2.7371361.\n","I0613 02:10:00.729266 140539975640960 estimator.py:371] Loss for final step: 2.7371361.\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"yH7YdH9dCtFx","colab_type":"code","outputId":"e4f63b48-4e58-497d-cb30-08b24218f3bc","executionInfo":{"status":"ok","timestamp":1592050580223,"user_tz":-540,"elapsed":1839,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":416}},"source":["!ls -ltra '{model_dir}'"],"execution_count":0,"outputs":[{"output_type":"stream","text":["total 534075\n","-rw------- 1 root root 31343921 Jun 12 19:10 graph.pbtxt\n","drwx------ 2 root root     4096 Jun 12 19:21 eval_0\n","-rw------- 1 root root    68731 
Jun 13 01:32 model.ckpt-4568.index\n","-rw------- 1 root root 74714000 Jun 13 01:32 model.ckpt-4568.data-00000-of-00001\n","-rw------- 1 root root 16450672 Jun 13 01:32 model.ckpt-4568.meta\n","-rw------- 1 root root    68731 Jun 13 01:42 model.ckpt-4684.index\n","-rw------- 1 root root 74714000 Jun 13 01:42 model.ckpt-4684.data-00000-of-00001\n","-rw------- 1 root root 16450672 Jun 13 01:42 model.ckpt-4684.meta\n","-rw------- 1 root root    68731 Jun 13 01:52 model.ckpt-4801.index\n","-rw------- 1 root root 74714000 Jun 13 01:52 model.ckpt-4801.data-00000-of-00001\n","-rw------- 1 root root 16450672 Jun 13 01:52 model.ckpt-4801.meta\n","-rw------- 1 root root    68731 Jun 13 02:02 model.ckpt-4918.index\n","-rw------- 1 root root 74714000 Jun 13 02:02 model.ckpt-4918.data-00000-of-00001\n","-rw------- 1 root root 16450672 Jun 13 02:02 model.ckpt-4918.meta\n","-rw------- 1 root root    68731 Jun 13 02:09 model.ckpt-5000.index\n","-rw------- 1 root root 74714000 Jun 13 02:09 model.ckpt-5000.data-00000-of-00001\n","-rw------- 1 root root 16450672 Jun 13 02:09 model.ckpt-5000.meta\n","-rw------- 1 root root      271 Jun 13 02:09 checkpoint\n","drwx------ 2 root root     4096 Jun 13 02:09 export\n","-rw------- 1 root root 59364133 Jun 13 02:10 events.out.tfevents.1591989015.40fa69cdd51a\n","drwx------ 3 root root     4096 Jun 13 12:12 fine_tuned_model\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"F3NeT-XlQzSs","colab_type":"text"},"source":["# 5단계. 
모델 저장 & tflite 변환"]},{"cell_type":"markdown","metadata":{"id":"nCFDXa39Q6cU","colab_type":"text"},"source":["## 1) Exporting a Trained Inference Graph"]},{"cell_type":"code","metadata":{"id":"wfpQGk7mC2mS","colab_type":"code","colab":{}},"source":["import os\n","import re\n","import numpy as np\n","\n","model_dir = '/content/gdrive/My Drive/capstone/model'\n","\n","output_directory = '%s/fine_tuned_model' % model_dir\n","os.makedirs(output_directory, exist_ok=True)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"M7t3CCysDB1w","colab_type":"code","outputId":"523b4cb8-9b3f-4b6d-949a-277463aeb19c","executionInfo":{"status":"ok","timestamp":1592062527234,"user_tz":-540,"elapsed":837,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["# 마지막 model ckpt 파일 선택\n","lst = os.listdir(model_dir)\n","lst = [l for l in lst if 'model.ckpt-' in l and '.meta' in l]\n","steps=np.array([int(re.findall('\\d+', l)[0]) for l in lst])\n","last_model = lst[steps.argmax()].replace('.meta', '')\n","\n","last_model_path = os.path.join(model_dir, last_model)\n","print(last_model_path)"],"execution_count":24,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone/model/model.ckpt-5000\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"pf9tPXd4DKSq","colab_type":"code","outputId":"53921701-b54b-4c7f-ba27-cf0ad5ed7374","executionInfo":{"status":"ok","timestamp":1592014686934,"user_tz":-540,"elapsed":22804,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000}},"source":["# 학습된 모델 파일 동결\n","# frozen_inference_graph.pb 생성\n","#!echo creates the frozen inference graph in fine_tuned_model\n","!python /content/gdrive/My\\ Drive/capstone/models/research/object_detection/export_inference_graph.py \\\n","    
--input_type=image_tensor \\\n","    --pipeline_config_path='{pipeline_fname}' \\\n","    --output_directory='{output_directory}' \\\n","    --trained_checkpoint_prefix='{last_model_path}'"],"execution_count":0,"outputs":[{"output_type":"stream","text":["WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tf_slim/layers/layers.py:1089: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Please use `layer.__call__` method instead.\n","W0613 02:17:49.556644 139782415091584 deprecation.py:323] From /usr/local/lib/python3.6/dist-packages/tf_slim/layers/layers.py:1089: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Please use `layer.__call__` method instead.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:17:52.066635 139782415091584 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:17:52.111626 139782415091584 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:17:52.283757 139782415091584 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:17:52.328312 139782415091584 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:17:52.377122 139782415091584 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:17:52.421121 139782415091584 
convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","WARNING:tensorflow:From /content/gdrive/My Drive/capstone/models/research/object_detection/core/post_processing.py:583: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use tf.where in 2.0, which has the same broadcast rule as np.where\n","W0613 02:17:52.715606 139782415091584 deprecation.py:323] From /content/gdrive/My Drive/capstone/models/research/object_detection/core/post_processing.py:583: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use tf.where in 2.0, which has the same broadcast rule as np.where\n","WARNING:tensorflow:From /content/gdrive/My Drive/capstone/models/research/object_detection/exporter.py:400: get_or_create_global_step (from tf_slim.ops.variables) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Please switch to tf.train.get_or_create_global_step\n","W0613 02:17:53.219620 139782415091584 deprecation.py:323] From /content/gdrive/My Drive/capstone/models/research/object_detection/exporter.py:400: get_or_create_global_step (from tf_slim.ops.variables) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Please switch to tf.train.get_or_create_global_step\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 02:17:54.888925 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 02:17:54.889309 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 
02:17:54.889601 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 02:17:54.889801 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 02:17:54.890067 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 02:17:54.890243 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 02:17:54.890512 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 02:17:54.890694 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 02:17:54.890957 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 02:17:54.891130 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 02:17:54.891381 139782415091584 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 02:17:54.891592 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 02:17:54.891902 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 02:17:54.892094 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 02:17:54.892354 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 02:17:54.892555 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 02:17:54.892814 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 02:17:54.893012 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 02:17:54.893269 139782415091584 quantize.py:299] Skipping quant 
after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 02:17:54.893455 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 02:17:54.893723 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 02:17:54.893949 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 02:17:54.894219 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 02:17:54.894395 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 02:17:54.894670 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 02:17:54.894848 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 02:17:54.895113 139782415091584 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 02:17:54.895288 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 02:17:54.895558 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 02:17:54.895725 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 02:17:54.895989 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 02:17:54.896161 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 02:17:54.896418 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 02:17:54.896600 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 02:17:54.896926 139782415091584 quantize.py:299] Skipping quant after 
FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 02:17:54.897135 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 02:17:54.897294 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 02:17:54.897500 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 02:17:54.897665 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 02:17:54.897822 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 02:17:54.897984 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 02:17:54.898140 139782415091584 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 02:17:54.898296 139782415091584 
quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","WARNING:tensorflow:From /content/gdrive/My Drive/capstone/models/research/object_detection/exporter.py:555: print_model_analysis (from tensorflow.contrib.tfprof.model_analyzer) is deprecated and will be removed after 2018-01-01.\n","Instructions for updating:\n","Use `tf.profiler.profile(graph, run_meta, op_log, cmd, options)`. Build `options` with `tf.profiler.ProfileOptionBuilder`. See README.md for details\n","W0613 02:17:54.901494 139782415091584 deprecation.py:323] From /content/gdrive/My Drive/capstone/models/research/object_detection/exporter.py:555: print_model_analysis (from tensorflow.contrib.tfprof.model_analyzer) is deprecated and will be removed after 2018-01-01.\n","Instructions for updating:\n","Use `tf.profiler.profile(graph, run_meta, op_log, cmd, options)`. Build `options` with `tf.profiler.ProfileOptionBuilder`. See README.md for details\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/profiler/internal/flops_registry.py:142: tensor_shape_from_node_def_name (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.tensor_shape_from_node_def_name`\n","W0613 02:17:54.902747 139782415091584 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/profiler/internal/flops_registry.py:142: tensor_shape_from_node_def_name (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.tensor_shape_from_node_def_name`\n","259 ops no flops stats due to incomplete shapes.\n","Parsing Inputs...\n","Incomplete shape.\n","\n","=========================Options=============================\n","-max_depth                  10000\n","-min_bytes                  0\n","-min_peak_bytes          
   0\n","-min_residual_bytes         0\n","-min_output_bytes           0\n","-min_micros                 0\n","-min_accelerator_micros     0\n","-min_cpu_micros             0\n","-min_params                 0\n","-min_float_ops              0\n","-min_occurrence             0\n","-step                       -1\n","-order_by                   name\n","-account_type_regexes       _trainable_variables\n","-start_name_regexes         .*\n","-trim_name_regexes          .*BatchNorm.*\n","-show_name_regexes          .*\n","-hide_name_regexes          \n","-account_displayed_op_only  true\n","-select                     params\n","-output                     stdout:\n","\n","==================Model Analysis Report======================\n","Incomplete shape.\n","\n","Doc:\n","scope: The nodes in the model graph are organized by their names, which is hierarchical like filesystem.\n","param: Number of parameters (in the Variable).\n","\n","Profile:\n","node name | # parameters\n","_TFProfRoot (--/4.62m params)\n","  BoxPredictor_0 (--/15.58k params)\n","    BoxPredictor_0/BoxEncodingPredictor (--/6.92k params)\n","      BoxPredictor_0/BoxEncodingPredictor/biases (12, 12/12 params)\n","      BoxPredictor_0/BoxEncodingPredictor/weights (1x1x576x12, 6.91k/6.91k params)\n","    BoxPredictor_0/ClassPredictor (--/8.65k params)\n","      BoxPredictor_0/ClassPredictor/biases (15, 15/15 params)\n","      BoxPredictor_0/ClassPredictor/weights (1x1x576x15, 8.64k/8.64k params)\n","  BoxPredictor_1 (--/69.17k params)\n","    BoxPredictor_1/BoxEncodingPredictor (--/30.74k params)\n","      BoxPredictor_1/BoxEncodingPredictor/biases (24, 24/24 params)\n","      BoxPredictor_1/BoxEncodingPredictor/weights (1x1x1280x24, 30.72k/30.72k params)\n","    BoxPredictor_1/ClassPredictor (--/38.43k params)\n","      BoxPredictor_1/ClassPredictor/biases (30, 30/30 params)\n","      BoxPredictor_1/ClassPredictor/weights (1x1x1280x30, 38.40k/38.40k params)\n","  BoxPredictor_2 (--/27.70k params)\n","    
BoxPredictor_2/BoxEncodingPredictor (--/12.31k params)\n","      BoxPredictor_2/BoxEncodingPredictor/biases (24, 24/24 params)\n","      BoxPredictor_2/BoxEncodingPredictor/weights (1x1x512x24, 12.29k/12.29k params)\n","    BoxPredictor_2/ClassPredictor (--/15.39k params)\n","      BoxPredictor_2/ClassPredictor/biases (30, 30/30 params)\n","      BoxPredictor_2/ClassPredictor/weights (1x1x512x30, 15.36k/15.36k params)\n","  BoxPredictor_3 (--/13.88k params)\n","    BoxPredictor_3/BoxEncodingPredictor (--/6.17k params)\n","      BoxPredictor_3/BoxEncodingPredictor/biases (24, 24/24 params)\n","      BoxPredictor_3/BoxEncodingPredictor/weights (1x1x256x24, 6.14k/6.14k params)\n","    BoxPredictor_3/ClassPredictor (--/7.71k params)\n","      BoxPredictor_3/ClassPredictor/biases (30, 30/30 params)\n","      BoxPredictor_3/ClassPredictor/weights (1x1x256x30, 7.68k/7.68k params)\n","  BoxPredictor_4 (--/13.88k params)\n","    BoxPredictor_4/BoxEncodingPredictor (--/6.17k params)\n","      BoxPredictor_4/BoxEncodingPredictor/biases (24, 24/24 params)\n","      BoxPredictor_4/BoxEncodingPredictor/weights (1x1x256x24, 6.14k/6.14k params)\n","    BoxPredictor_4/ClassPredictor (--/7.71k params)\n","      BoxPredictor_4/ClassPredictor/biases (30, 30/30 params)\n","      BoxPredictor_4/ClassPredictor/weights (1x1x256x30, 7.68k/7.68k params)\n","  BoxPredictor_5 (--/6.97k params)\n","    BoxPredictor_5/BoxEncodingPredictor (--/3.10k params)\n","      BoxPredictor_5/BoxEncodingPredictor/biases (24, 24/24 params)\n","      BoxPredictor_5/BoxEncodingPredictor/weights (1x1x128x24, 3.07k/3.07k params)\n","    BoxPredictor_5/ClassPredictor (--/3.87k params)\n","      BoxPredictor_5/ClassPredictor/biases (30, 30/30 params)\n","      BoxPredictor_5/ClassPredictor/weights (1x1x128x30, 3.84k/3.84k params)\n","  FeatureExtractor (--/4.48m params)\n","    FeatureExtractor/MobilenetV2 (--/4.48m params)\n","      FeatureExtractor/MobilenetV2/Conv (--/864 params)\n","        
FeatureExtractor/MobilenetV2/Conv/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/Conv/weights (3x3x3x32, 864/864 params)\n","      FeatureExtractor/MobilenetV2/Conv_1 (--/409.60k params)\n","        FeatureExtractor/MobilenetV2/Conv_1/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/Conv_1/weights (1x1x320x1280, 409.60k/409.60k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv (--/800 params)\n","        FeatureExtractor/MobilenetV2/expanded_conv/depthwise (--/288 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv/depthwise/depthwise_weights (3x3x32x1, 288/288 params)\n","        FeatureExtractor/MobilenetV2/expanded_conv/project (--/512 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv/project/weights (1x1x32x16, 512/512 params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_1 (--/4.70k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise (--/864 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/depthwise_weights (3x3x96x1, 864/864 params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_1/expand (--/1.54k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_1/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_1/expand/weights (1x1x16x96, 1.54k/1.54k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_1/project (--/2.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_1/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_1/project/weights (1x1x96x24, 2.30k/2.30k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_10 (--/64.90k params)\n","     
   FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise (--/3.46k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/depthwise_weights (3x3x384x1, 3.46k/3.46k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_10/expand (--/24.58k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_10/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_10/expand/weights (1x1x64x384, 24.58k/24.58k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_10/project (--/36.86k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_10/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_10/project/weights (1x1x384x96, 36.86k/36.86k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_11 (--/115.78k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise (--/5.18k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/depthwise_weights (3x3x576x1, 5.18k/5.18k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_11/expand (--/55.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_11/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_11/expand/weights (1x1x96x576, 55.30k/55.30k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_11/project (--/55.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_11/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_11/project/weights (1x1x576x96, 55.30k/55.30k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_12 (--/115.78k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise (--/5.18k params)\n","          
FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/depthwise_weights (3x3x576x1, 5.18k/5.18k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_12/expand (--/55.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_12/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_12/expand/weights (1x1x96x576, 55.30k/55.30k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_12/project (--/55.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_12/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_12/project/weights (1x1x576x96, 55.30k/55.30k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_13 (--/152.64k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise (--/5.18k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/depthwise_weights (3x3x576x1, 5.18k/5.18k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_13/expand (--/55.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_13/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_13/expand/weights (1x1x96x576, 55.30k/55.30k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_13/project (--/92.16k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_13/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_13/project/weights (1x1x576x160, 92.16k/92.16k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_14 (--/315.84k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise (--/8.64k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/BatchNorm (--/0 params)\n","          
FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/depthwise_weights (3x3x960x1, 8.64k/8.64k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_14/expand (--/153.60k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_14/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_14/expand/weights (1x1x160x960, 153.60k/153.60k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_14/project (--/153.60k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_14/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_14/project/weights (1x1x960x160, 153.60k/153.60k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_15 (--/315.84k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise (--/8.64k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/depthwise_weights (3x3x960x1, 8.64k/8.64k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_15/expand (--/153.60k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_15/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_15/expand/weights (1x1x160x960, 153.60k/153.60k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_15/project (--/153.60k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_15/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_15/project/weights (1x1x960x160, 153.60k/153.60k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_16 (--/469.44k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise (--/8.64k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/depthwise_weights (3x3x960x1, 
8.64k/8.64k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_16/expand (--/153.60k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_16/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_16/expand/weights (1x1x160x960, 153.60k/153.60k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_16/project (--/307.20k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_16/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_16/project/weights (1x1x960x320, 307.20k/307.20k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_2 (--/8.21k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise (--/1.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/depthwise_weights (3x3x144x1, 1.30k/1.30k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_2/expand (--/3.46k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_2/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_2/expand/weights (1x1x24x144, 3.46k/3.46k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_2/project (--/3.46k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_2/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_2/project/weights (1x1x144x24, 3.46k/3.46k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_3 (--/9.36k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise (--/1.30k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/depthwise_weights (3x3x144x1, 1.30k/1.30k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_3/expand (--/3.46k params)\n","          
FeatureExtractor/MobilenetV2/expanded_conv_3/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_3/expand/weights (1x1x24x144, 3.46k/3.46k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_3/project (--/4.61k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_3/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_3/project/weights (1x1x144x32, 4.61k/4.61k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_4 (--/14.02k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise (--/1.73k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/depthwise_weights (3x3x192x1, 1.73k/1.73k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_4/expand (--/6.14k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_4/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_4/expand/weights (1x1x32x192, 6.14k/6.14k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_4/project (--/6.14k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_4/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_4/project/weights (1x1x192x32, 6.14k/6.14k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_5 (--/14.02k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise (--/1.73k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/depthwise_weights (3x3x192x1, 1.73k/1.73k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_5/expand (--/6.14k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_5/expand/BatchNorm (--/0 params)\n","          
FeatureExtractor/MobilenetV2/expanded_conv_5/expand/weights (1x1x32x192, 6.14k/6.14k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_5/project (--/6.14k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_5/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_5/project/weights (1x1x192x32, 6.14k/6.14k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_6 (--/20.16k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise (--/1.73k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/depthwise_weights (3x3x192x1, 1.73k/1.73k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_6/expand (--/6.14k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_6/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_6/expand/weights (1x1x32x192, 6.14k/6.14k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_6/project (--/12.29k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_6/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_6/project/weights (1x1x192x64, 12.29k/12.29k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_7 (--/52.61k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise (--/3.46k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/depthwise_weights (3x3x384x1, 3.46k/3.46k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_7/expand (--/24.58k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_7/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_7/expand/weights (1x1x64x384, 24.58k/24.58k params)\n","        
FeatureExtractor/MobilenetV2/expanded_conv_7/project (--/24.58k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_7/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_7/project/weights (1x1x384x64, 24.58k/24.58k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_8 (--/52.61k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise (--/3.46k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/depthwise_weights (3x3x384x1, 3.46k/3.46k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_8/expand (--/24.58k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_8/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_8/expand/weights (1x1x64x384, 24.58k/24.58k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_8/project (--/24.58k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_8/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_8/project/weights (1x1x384x64, 24.58k/24.58k params)\n","      FeatureExtractor/MobilenetV2/expanded_conv_9 (--/52.61k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise (--/3.46k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/depthwise_weights (3x3x384x1, 3.46k/3.46k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_9/expand (--/24.58k params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_9/expand/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_9/expand/weights (1x1x64x384, 24.58k/24.58k params)\n","        FeatureExtractor/MobilenetV2/expanded_conv_9/project (--/24.58k params)\n","          
FeatureExtractor/MobilenetV2/expanded_conv_9/project/BatchNorm (--/0 params)\n","          FeatureExtractor/MobilenetV2/expanded_conv_9/project/weights (1x1x384x64, 24.58k/24.58k params)\n","      FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256 (--/327.68k params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/weights (1x1x1280x256, 327.68k/327.68k params)\n","      FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128 (--/65.54k params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/weights (1x1x512x128, 65.54k/65.54k params)\n","      FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128 (--/32.77k params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/weights (1x1x256x128, 32.77k/32.77k params)\n","      FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64 (--/16.38k params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/weights (1x1x256x64, 16.38k/16.38k params)\n","      FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512 (--/1.18m params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/weights (3x3x256x512, 1.18m/1.18m params)\n","      FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256 (--/294.91k params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/weights (3x3x128x256, 294.91k/294.91k params)\n","      
FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256 (--/294.91k params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/weights (3x3x128x256, 294.91k/294.91k params)\n","      FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128 (--/73.73k params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/BatchNorm (--/0 params)\n","        FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/weights (3x3x64x128, 73.73k/73.73k params)\n","\n","======================End of Report==========================\n","259 ops no flops stats due to incomplete shapes.\n","Parsing Inputs...\n","Incomplete shape.\n","\n","=========================Options=============================\n","-max_depth                  10000\n","-min_bytes                  0\n","-min_peak_bytes             0\n","-min_residual_bytes         0\n","-min_output_bytes           0\n","-min_micros                 0\n","-min_accelerator_micros     0\n","-min_cpu_micros             0\n","-min_params                 0\n","-min_float_ops              1\n","-min_occurrence             0\n","-step                       -1\n","-order_by                   float_ops\n","-account_type_regexes       .*\n","-start_name_regexes         .*\n","-trim_name_regexes          .*BatchNorm.*,.*Initializer.*,.*Regularizer.*,.*BiasAdd.*\n","-show_name_regexes          .*\n","-hide_name_regexes          \n","-account_displayed_op_only  true\n","-select                     float_ops\n","-output                     stdout:\n","\n","==================Model Analysis Report======================\n","Incomplete shape.\n","\n","Doc:\n","scope: The nodes in the model graph are organized by their names, which is hierarchical like filesystem.\n","flops: Number of float operations. 
Note: Please read the implementation for the math behind it.\n","\n","Profile:\n","node name | # float_ops\n","_TFProfRoot (--/4.49m flops)\n","  FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/mul_fold (1.18m/1.18m flops)\n","  FeatureExtractor/MobilenetV2/Conv_1/mul_fold (409.60k/409.60k flops)\n","  FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/mul_fold (327.68k/327.68k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_16/project/mul_fold (307.20k/307.20k flops)\n","  FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/mul_fold (294.91k/294.91k flops)\n","  FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/mul_fold (294.91k/294.91k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_16/expand/mul_fold (153.60k/153.60k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_15/project/mul_fold (153.60k/153.60k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_15/expand/mul_fold (153.60k/153.60k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_14/project/mul_fold (153.60k/153.60k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_14/expand/mul_fold (153.60k/153.60k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_13/project/mul_fold (92.16k/92.16k flops)\n","  FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/mul_fold (73.73k/73.73k flops)\n","  FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/mul_fold (65.54k/65.54k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_11/expand/mul_fold (55.30k/55.30k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_12/expand/mul_fold (55.30k/55.30k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_12/project/mul_fold (55.30k/55.30k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_13/expand/mul_fold (55.30k/55.30k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_11/project/mul_fold (55.30k/55.30k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_10/project/mul_fold (36.86k/36.86k flops)\n","  
FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/mul_fold (32.77k/32.77k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_10/expand/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_9/project/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_9/expand/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_8/project/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_8/expand/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_7/project/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_7/expand/mul_fold (24.58k/24.58k flops)\n","  FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/mul_fold (16.38k/16.38k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_6/project/mul_fold (12.29k/12.29k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/mul_fold (8.64k/8.64k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/mul_fold (8.64k/8.64k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/mul_fold (8.64k/8.64k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_5/expand/mul_fold (6.14k/6.14k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_6/expand/mul_fold (6.14k/6.14k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_5/project/mul_fold (6.14k/6.14k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_4/project/mul_fold (6.14k/6.14k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_4/expand/mul_fold (6.14k/6.14k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/mul_fold (5.18k/5.18k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/mul_fold (5.18k/5.18k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/mul_fold (5.18k/5.18k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_3/project/mul_fold (4.61k/4.61k flops)\n","  
FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_3/expand/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_2/project/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_2/expand/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/mul_fold (3.46k/3.46k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_1/project/mul_fold (2.30k/2.30k flops)\n","  MultipleGridAnchorGenerator/sub (2.17k/2.17k flops)\n","  MultipleGridAnchorGenerator/mul_19 (2.17k/2.17k flops)\n","  MultipleGridAnchorGenerator/mul_20 (2.17k/2.17k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/mul_fold (1.73k/1.73k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/mul_fold (1.73k/1.73k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/mul_fold (1.73k/1.73k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_1/expand/mul_fold (1.54k/1.54k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/mul_fold (1.30k/1.30k flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/mul_fold (1.30k/1.30k flops)\n","  MultipleGridAnchorGenerator/sub_1 (1.20k/1.20k flops)\n","  MultipleGridAnchorGenerator/mul_28 (1.20k/1.20k flops)\n","  MultipleGridAnchorGenerator/mul_27 (1.20k/1.20k flops)\n","  MultipleGridAnchorGenerator/mul_21 (1.08k/1.08k flops)\n","  FeatureExtractor/MobilenetV2/Conv/mul_fold (864/864 flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/mul_fold (864/864 flops)\n","  MultipleGridAnchorGenerator/mul_29 (600/600 flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv/project/mul_fold (512/512 flops)\n","  MultipleGridAnchorGenerator/sub_2 
(300/300 flops)\n","  MultipleGridAnchorGenerator/mul_36 (300/300 flops)\n","  MultipleGridAnchorGenerator/mul_35 (300/300 flops)\n","  FeatureExtractor/MobilenetV2/expanded_conv/depthwise/mul_fold (288/288 flops)\n","  MultipleGridAnchorGenerator/mul_37 (150/150 flops)\n","  MultipleGridAnchorGenerator/mul_44 (108/108 flops)\n","  MultipleGridAnchorGenerator/mul_43 (108/108 flops)\n","  MultipleGridAnchorGenerator/sub_3 (108/108 flops)\n","  MultipleGridAnchorGenerator/mul_45 (54/54 flops)\n","  MultipleGridAnchorGenerator/mul_51 (48/48 flops)\n","  MultipleGridAnchorGenerator/mul_52 (48/48 flops)\n","  MultipleGridAnchorGenerator/sub_4 (48/48 flops)\n","  MultipleGridAnchorGenerator/mul_53 (24/24 flops)\n","  MultipleGridAnchorGenerator/mul_18 (19/19 flops)\n","  MultipleGridAnchorGenerator/mul_17 (19/19 flops)\n","  MultipleGridAnchorGenerator/sub_5 (12/12 flops)\n","  MultipleGridAnchorGenerator/mul_60 (12/12 flops)\n","  MultipleGridAnchorGenerator/mul_59 (12/12 flops)\n","  MultipleGridAnchorGenerator/mul_25 (10/10 flops)\n","  MultipleGridAnchorGenerator/mul_26 (10/10 flops)\n","  MultipleGridAnchorGenerator/mul_61 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_46 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_47 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_48 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_54 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_55 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_56 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_22 (6/6 flops)\n","  MultipleGridAnchorGenerator/truediv_15 (6/6 flops)\n","  MultipleGridAnchorGenerator/truediv_16 (6/6 flops)\n","  MultipleGridAnchorGenerator/truediv_17 (6/6 flops)\n","  MultipleGridAnchorGenerator/truediv_18 (6/6 flops)\n","  MultipleGridAnchorGenerator/truediv_19 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_32 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_24 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_23 (6/6 flops)\n","  
MultipleGridAnchorGenerator/mul_30 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_31 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_38 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_39 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_40 (6/6 flops)\n","  MultipleGridAnchorGenerator/mul_33 (5/5 flops)\n","  MultipleGridAnchorGenerator/mul_34 (5/5 flops)\n","  MultipleGridAnchorGenerator/mul_41 (3/3 flops)\n","  MultipleGridAnchorGenerator/mul_42 (3/3 flops)\n","  MultipleGridAnchorGenerator/truediv_14 (3/3 flops)\n","  MultipleGridAnchorGenerator/mul_15 (3/3 flops)\n","  MultipleGridAnchorGenerator/mul_14 (3/3 flops)\n","  MultipleGridAnchorGenerator/mul_16 (3/3 flops)\n","  MultipleGridAnchorGenerator/mul_49 (2/2 flops)\n","  MultipleGridAnchorGenerator/mul_50 (2/2 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/SortByField_1/Equal (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_9 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_8 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_7 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_6 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_5 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/SortByField/Equal (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_4 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_3 (1/1 flops)\n","  
Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_2 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater_1 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/Greater (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_7 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_6 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_14 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_5 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_4 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_3 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_2 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/sub_1 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_2 (1/1 flops)\n","  Preprocessor/map/while/Less_1 (1/1 flops)\n","  Preprocessor/map/while/Less (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/ones/Less (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_9 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_8 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_7 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_6 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_5 (1/1 flops)\n","  
Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_4 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_3 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_1 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_19 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_18 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_17 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_16 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_15 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/ChangeCoordinateFrame/sub (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_13 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_12 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_11 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/PadOrClipBoxList/sub_10 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_9 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_13 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_12 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_11 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_10 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_1 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv (1/1 flops)\n","  MultipleGridAnchorGenerator/Minimum (1/1 flops)\n","  MultipleGridAnchorGenerator/assert_equal_1/Equal (1/1 flops)\n","  MultipleGridAnchorGenerator/mul (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_1 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_3 (1/1 flops)\n","  
MultipleGridAnchorGenerator/mul_8 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_7 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_6 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_58 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_57 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_13 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_4 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_5 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_10 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_11 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/Less (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/Minimum_3 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/Minimum_2 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/Minimum_1 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/Minimum (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/Greater (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/ChangeCoordinateFrame/truediv_1 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/ChangeCoordinateFrame/truediv (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/ChangeCoordinateFrame/sub_1 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_12 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/Less_1 (1/1 flops)\n","  Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/Minimum_4 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_9 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_8 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_7 (1/1 flops)\n","  
MultipleGridAnchorGenerator/truediv_6 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_5 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_4 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_3 (1/1 flops)\n","  MultipleGridAnchorGenerator/truediv_2 (1/1 flops)\n","  MultipleGridAnchorGenerator/mul_2 (1/1 flops)\n","\n","======================End of Report==========================\n","2020-06-13 02:17:58.235790: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library libcuda.so.1\n","2020-06-13 02:17:58.239788: E tensorflow/stream_executor/cuda/cuda_driver.cc:318] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected\n","2020-06-13 02:17:58.239853: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (40fa69cdd51a): /proc/driver/nvidia/version does not exist\n","2020-06-13 02:17:58.247587: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2300000000 Hz\n","2020-06-13 02:17:58.248155: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x1ec0bc0 initialized for platform Host (this does not guarantee that XLA will be used). 
Devices:\n","2020-06-13 02:17:58.248213: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","I0613 02:17:58.253843 139782415091584 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:127: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use standard file APIs to check for files with this prefix.\n","W0613 02:18:00.540854 139782415091584 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:127: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use standard file APIs to check for files with this prefix.\n","INFO:tensorflow:Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","I0613 02:18:01.414689 139782415091584 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:233: convert_variables_to_constants (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.convert_variables_to_constants`\n","W0613 02:18:02.658184 139782415091584 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:233: convert_variables_to_constants (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use 
`tf.compat.v1.graph_util.convert_variables_to_constants`\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/graph_util_impl.py:277: extract_sub_graph (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.extract_sub_graph`\n","W0613 02:18:02.658519 139782415091584 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/graph_util_impl.py:277: extract_sub_graph (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.extract_sub_graph`\n","INFO:tensorflow:Froze 632 variables.\n","I0613 02:18:03.358210 139782415091584 graph_util_impl.py:334] Froze 632 variables.\n","INFO:tensorflow:Converted 632 variables to const ops.\n","I0613 02:18:03.503803 139782415091584 graph_util_impl.py:394] Converted 632 variables to const ops.\n","WARNING:tensorflow:From /content/gdrive/My Drive/capstone/models/research/object_detection/exporter.py:326: build_tensor_info (from tensorflow.python.saved_model.utils_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","This function will only be available through the v1 compatibility library as tf.compat.v1.saved_model.utils.build_tensor_info or tf.compat.v1.saved_model.build_tensor_info.\n","W0613 02:18:04.662033 139782415091584 deprecation.py:323] From /content/gdrive/My Drive/capstone/models/research/object_detection/exporter.py:326: build_tensor_info (from tensorflow.python.saved_model.utils_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","This function will only be available through the v1 compatibility library as tf.compat.v1.saved_model.utils.build_tensor_info or tf.compat.v1.saved_model.build_tensor_info.\n","INFO:tensorflow:No assets to save.\n","I0613 
02:18:04.662898 139782415091584 builder_impl.py:640] No assets to save.\n","INFO:tensorflow:No assets to write.\n","I0613 02:18:04.663059 139782415091584 builder_impl.py:460] No assets to write.\n","INFO:tensorflow:SavedModel written to: /content/gdrive/My Drive/capstone/model/fine_tuned_model/saved_model/saved_model.pb\n","I0613 02:18:05.123261 139782415091584 builder_impl.py:425] SavedModel written to: /content/gdrive/My Drive/capstone/model/fine_tuned_model/saved_model/saved_model.pb\n","INFO:tensorflow:Writing pipeline config file to /content/gdrive/My Drive/capstone/model/fine_tuned_model/pipeline.config\n","I0613 02:18:05.167714 139782415091584 config_util.py:225] Writing pipeline config file to /content/gdrive/My Drive/capstone/model/fine_tuned_model/pipeline.config\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"uAyqVz9oDl7V","colab_type":"code","outputId":"02df9f57-6913-4448-81d8-15d011508c0c","executionInfo":{"status":"ok","timestamp":1592014759233,"user_tz":-540,"elapsed":14758,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000}},"source":["# https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193\n","# create the tensorflow lite graph\n","!python /content/gdrive/My\\ Drive/capstone/models/research/object_detection/export_tflite_ssd_graph.py \\\n","    --pipeline_config_path='{pipeline_fname}' \\\n","    --trained_checkpoint_prefix='{last_model_path}' \\\n","    --output_directory='{output_directory}' \\\n","    --add_postprocessing_op=true"],"execution_count":0,"outputs":[{"output_type":"stream","text":["WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tf_slim/layers/layers.py:1089: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Please use 
`layer.__call__` method instead.\n","W0613 02:19:09.153008 139657997989760 deprecation.py:323] From /usr/local/lib/python3.6/dist-packages/tf_slim/layers/layers.py:1089: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Please use `layer.__call__` method instead.\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:19:11.486626 139657997989760 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:19:11.521879 139657997989760 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:19:11.555258 139657997989760 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:19:11.588026 139657997989760 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:19:11.620829 139657997989760 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","INFO:tensorflow:depth of additional conv before box predictor: 0\n","I0613 02:19:11.654421 139657997989760 convolutional_box_predictor.py:156] depth of additional conv before box predictor: 0\n","2020-06-13 02:19:11.703038: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library libcuda.so.1\n","2020-06-13 02:19:11.706466: E tensorflow/stream_executor/cuda/cuda_driver.cc:318] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected\n","2020-06-13 02:19:11.706505: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host 
(40fa69cdd51a): /proc/driver/nvidia/version does not exist\n","2020-06-13 02:19:11.712222: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2300000000 Hz\n","2020-06-13 02:19:11.712539: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x280c680 initialized for platform Host (this does not guarantee that XLA will be used). Devices:\n","2020-06-13 02:19:11.712580: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","I0613 02:19:13.504072 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","I0613 02:19:13.504499 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","I0613 02:19:13.504804 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","I0613 02:19:13.504992 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_1/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","I0613 02:19:13.505250 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","I0613 02:19:13.505452 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_2/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","I0613 02:19:13.505710 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","I0613 02:19:13.505883 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_3/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","I0613 02:19:13.506134 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","I0613 02:19:13.506309 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_4/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","I0613 02:19:13.506572 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","I0613 02:19:13.506789 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_5/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","I0613 02:19:13.507047 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","I0613 02:19:13.507224 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_6/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","I0613 02:19:13.507485 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","I0613 02:19:13.507656 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_7/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","I0613 02:19:13.507905 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","I0613 02:19:13.508084 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_8/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","I0613 02:19:13.508328 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","I0613 02:19:13.508515 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_9/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","I0613 02:19:13.508762 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","I0613 02:19:13.508932 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_10/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","I0613 02:19:13.509176 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","I0613 02:19:13.509343 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_11/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","I0613 02:19:13.509599 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","I0613 02:19:13.509768 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_12/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","I0613 02:19:13.510019 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","I0613 02:19:13.510184 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_13/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","I0613 02:19:13.510425 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","I0613 02:19:13.510611 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_14/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","I0613 02:19:13.510865 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","I0613 02:19:13.511042 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_15/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","I0613 02:19:13.511290 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/expand/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","I0613 02:19:13.511474 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/expanded_conv_16/depthwise/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","I0613 02:19:13.511785 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/Conv_1/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","I0613 02:19:13.512110 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_2_1x1_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","I0613 02:19:13.512421 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_2_3x3_s2_512/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","I0613 02:19:13.512741 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_3_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after 
FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","I0613 02:19:13.513016 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_3_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","I0613 02:19:13.513213 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_4_1x1_128/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","I0613 02:19:13.513374 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_4_3x3_s2_256/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","I0613 02:19:13.513566 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_1_Conv2d_5_1x1_64/add_fold\n","INFO:tensorflow:Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","I0613 02:19:13.513747 139657997989760 quantize.py:299] Skipping quant after FeatureExtractor/MobilenetV2/layer_19_2_Conv2d_5_3x3_s2_128/add_fold\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:127: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use standard file APIs to check for files with this prefix.\n","W0613 02:19:14.067324 139657997989760 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:127: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use standard file APIs to check for files with this prefix.\n","INFO:tensorflow:Restoring parameters from 
/content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","I0613 02:19:14.857852 139657997989760 saver.py:1284] Restoring parameters from /content/gdrive/My Drive/capstone/model/model.ckpt-5000\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:233: convert_variables_to_constants (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.convert_variables_to_constants`\n","W0613 02:19:15.910874 139657997989760 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/tools/freeze_graph.py:233: convert_variables_to_constants (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.convert_variables_to_constants`\n","WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/graph_util_impl.py:277: extract_sub_graph (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.extract_sub_graph`\n","W0613 02:19:15.911130 139657997989760 deprecation.py:323] From /tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/graph_util_impl.py:277: extract_sub_graph (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Use `tf.compat.v1.graph_util.extract_sub_graph`\n","INFO:tensorflow:Froze 632 variables.\n","I0613 02:19:16.460760 139657997989760 graph_util_impl.py:334] Froze 632 variables.\n","INFO:tensorflow:Converted 632 variables to const ops.\n","I0613 02:19:16.551098 139657997989760 graph_util_impl.py:394] Converted 632 variables to const ops.\n","2020-06-13 02:19:16.686791: I tensorflow/tools/graph_transforms/transform_graph.cc:317] Applying 
strip_unused_nodes\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"nrvYKEv9Dw61","colab_type":"code","outputId":"cd1d801f-273c-450e-b508-a182e584f85a","executionInfo":{"status":"ok","timestamp":1592014777366,"user_tz":-540,"elapsed":10569,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":144}},"source":["!echo \"CONVERTING frozen graph to quantized TF Lite file...\"\n","!tflite_convert \\\n","  --output_file='{output_directory}/detection_quantized.tflite' \\\n","  --graph_def_file='{output_directory}/tflite_graph.pb' \\\n","  --inference_type=QUANTIZED_UINT8 \\\n","  --input_arrays='normalized_input_image_tensor' \\\n","  --output_arrays='TFLite_Detection_PostProcess,TFLite_Detection_PostProcess:1,TFLite_Detection_PostProcess:2,TFLite_Detection_PostProcess:3' \\\n","  --mean_values=128 \\\n","  --std_dev_values=128 \\\n","  --input_shapes=1,300,300,3 \\\n","  --change_concat_input_ranges=false \\\n","  --allow_nudging_weights_to_use_fast_gemm_kernel=true \\\n","  --allow_custom_ops"],"execution_count":0,"outputs":[{"output_type":"stream","text":["CONVERTING frozen graph to quantized TF Lite file...\n","2020-06-13 02:19:32.506161: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library libcuda.so.1\n","2020-06-13 02:19:32.508891: E tensorflow/stream_executor/cuda/cuda_driver.cc:318] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected\n","2020-06-13 02:19:32.508927: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (40fa69cdd51a): /proc/driver/nvidia/version does not exist\n","2020-06-13 02:19:32.514607: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2300000000 Hz\n","2020-06-13 02:19:32.514923: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x178aa00 initialized for platform 
Host (this does not guarantee that XLA will be used). Devices:\n","2020-06-13 02:19:32.514959: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"iNByDwfBElHT","colab_type":"code","outputId":"c16fd40b-a96b-4fab-d095-16396b13a571","executionInfo":{"status":"ok","timestamp":1592062538567,"user_tz":-540,"elapsed":3823,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":252}},"source":["print(output_directory)\n","!ls -ltra '{output_directory}'\n","pb_fname = os.path.join(os.path.abspath(output_directory), \"frozen_inference_graph.pb\")  # this is tflite graph\n","!cp '{label_map_pbtxt_fname}' '{output_directory}'"],"execution_count":25,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone/model/fine_tuned_model\n","total 115759\n","-rw------- 1 root root    23537 Jun 13 02:17 model.ckpt.index\n","-rw------- 1 root root 18791804 Jun 13 02:17 model.ckpt.data-00000-of-00001\n","-rw------- 1 root root       77 Jun 13 02:17 checkpoint\n","-rw------- 1 root root  2181897 Jun 13 02:18 model.ckpt.meta\n","-rw------- 1 root root 19652627 Jun 13 02:18 frozen_inference_graph.pb\n","drwx------ 2 root root     4096 Jun 13 02:18 saved_model\n","-rw------- 1 root root     4283 Jun 13 02:18 pipeline.config\n","-rw------- 1 root root 19277300 Jun 13 02:19 tflite_graph.pb\n","-rw------- 1 root root 53838313 Jun 13 02:19 tflite_graph.pbtxt\n","-rw------- 1 root root  4760576 Jun 13 02:19 detection_quantized.tflite\n","-rw------- 1 root root      159 Jun 13 15:34 label_map.pbtxt\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"wFgOqVyWRCYf","colab_type":"text"},"source":["## 2) 테스트 이미지로 
테스트"]},{"cell_type":"code","metadata":{"id":"a82bkn-HE0zG","colab_type":"code","outputId":"2ac0ca31-447b-4fc4-ab18-b0dccfb189eb","executionInfo":{"status":"ok","timestamp":1592062541062,"user_tz":-540,"elapsed":1437,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":73}},"source":["import os\n","import glob\n","\n","# Path to frozen detection graph. This is the actual model that is used for the object detection.\n","PATH_TO_CKPT = pb_fname\n","print(PATH_TO_CKPT)\n","\n","# List of the strings that is used to add correct label for each box.\n","PATH_TO_LABELS = label_map_pbtxt_fname\n","\n","# 테스트 이미지 디렉토리\n","PATH_TO_TEST_IMAGES_DIR = '/content/gdrive/My Drive/capstone/data/images/test'\n","assert os.path.isfile(pb_fname)\n","assert os.path.isfile(PATH_TO_LABELS)\n","# 이미지 확장자에 따라 변경하기\n","TEST_IMAGE_PATHS = glob.glob(os.path.join(PATH_TO_TEST_IMAGES_DIR, \"*.jpg\"))\n","assert len(TEST_IMAGE_PATHS) > 0, 'No image found in `{}`.'.format(PATH_TO_TEST_IMAGES_DIR)\n","print(TEST_IMAGE_PATHS)"],"execution_count":26,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone/model/fine_tuned_model/frozen_inference_graph.pb\n","['/content/gdrive/My Drive/capstone/data/images/test/unnamed.jpg', '/content/gdrive/My Drive/capstone/data/images/test/507039_267037_3257.jpg', '/content/gdrive/My Drive/capstone/data/images/test/thumb-1028419091_59437bc7_BEEEB8B0C0CCBAB8C8A3B1B8BFAAC7A5C1F6C6C7_600x300.jpg', '/content/gdrive/My Drive/capstone/data/images/test/SSI_20181116103136_V.jpg', '/content/gdrive/My Drive/capstone/data/images/test/NISI20190927_0000402757_web_20190927120306_20190927142607031 (1).jpg', '/content/gdrive/My Drive/capstone/data/images/test/1159958190.jpg', '/content/gdrive/My Drive/capstone/data/images/test/images (1).jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200521_174227553_04.jpg', '/content/gdrive/My 
Drive/capstone/data/images/test/KakaoTalk_20200521_174227553_05.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200521_174227553_07.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200521_174227553_08.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200521_174227553_06.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200527_031424697_10.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200527_031424697_11.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200527_031424697_12.jpg', '/content/gdrive/My Drive/capstone/data/images/test/KakaoTalk_20200610_084334003.jpg', '/content/gdrive/My Drive/capstone/data/images/test/images(2).jpg', '/content/gdrive/My Drive/capstone/data/images/test/images(1).jpg', '/content/gdrive/My Drive/capstone/data/images/test/images.jpg']\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"MtCCfT7yFIfV","colab_type":"code","outputId":"58cbc4fc-ea0e-4eae-f7c2-c6e76f3017ab","executionInfo":{"status":"ok","timestamp":1592062545040,"user_tz":-540,"elapsed":2072,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["%cd /content/gdrive/My Drive/capstone/models/research/object_detection\n","\n","import numpy as np\n","import os\n","import six.moves.urllib as urllib\n","import sys\n","import tarfile\n","import tensorflow as tf\n","import zipfile\n","\n","from collections import defaultdict\n","from io import StringIO\n","from matplotlib import pyplot as plt\n","from PIL import Image\n","\n","sys.path.append(\"..\")\n","from object_detection.utils import ops as utils_ops\n","\n","%matplotlib inline\n","\n","from object_detection.utils import label_map_util\n","from object_detection.utils import visualization_utils as vis_util\n","\n","detection_graph = tf.Graph()\n","with 
detection_graph.as_default():\n","    od_graph_def = tf.GraphDef()\n","    with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:\n","        serialized_graph = fid.read()\n","        od_graph_def.ParseFromString(serialized_graph)\n","        tf.import_graph_def(od_graph_def, name='')\n","\n","label_map = label_map_util.load_labelmap(PATH_TO_LABELS)\n","categories = label_map_util.convert_label_map_to_categories(\n","    label_map, max_num_classes=num_classes, use_display_name=True)\n","category_index = label_map_util.create_category_index(categories)\n","\n","def load_image_into_numpy_array(image):\n","    (im_width, im_height) = image.size\n","    return np.array(image.getdata()).reshape(\n","        (im_height, im_width, 3)).astype(np.uint8)\n","\n","# Size, in inches, of the output images.\n","IMAGE_SIZE = (12, 8)\n","\n","def run_inference_for_single_image(image, graph):\n","    with graph.as_default():\n","        with tf.Session() as sess:\n","            # Get handles to input and output tensors\n","            ops = tf.get_default_graph().get_operations()\n","            all_tensor_names = {\n","                output.name for op in ops for output in op.outputs}\n","            tensor_dict = {}\n","            for key in [\n","                'num_detections', 'detection_boxes', 'detection_scores',\n","                'detection_classes', 'detection_masks'\n","            ]:\n","                tensor_name = key + ':0'\n","                if tensor_name in all_tensor_names:\n","                    tensor_dict[key] = tf.get_default_graph().get_tensor_by_name(\n","                        tensor_name)\n","            if 'detection_masks' in tensor_dict:\n","                # The following processing is only for single image\n","                detection_boxes = tf.squeeze(\n","                    tensor_dict['detection_boxes'], [0])\n","                detection_masks = tf.squeeze(\n","                    tensor_dict['detection_masks'], [0])\n","                # 
Reframe is required to translate mask from box coordinates to image coordinates and fit the image size.\n","                real_num_detection = tf.cast(\n","                    tensor_dict['num_detections'][0], tf.int32)\n","                detection_boxes = tf.slice(detection_boxes, [0, 0], [\n","                                           real_num_detection, -1])\n","                detection_masks = tf.slice(detection_masks, [0, 0, 0], [\n","                                           real_num_detection, -1, -1])\n","                detection_masks_reframed = utils_ops.reframe_box_masks_to_image_masks(\n","                    detection_masks, detection_boxes, image.shape[0], image.shape[1])\n","                detection_masks_reframed = tf.cast(\n","                    tf.greater(detection_masks_reframed, 0.5), tf.uint8)\n","                # Follow the convention by adding back the batch dimension\n","                tensor_dict['detection_masks'] = tf.expand_dims(\n","                    detection_masks_reframed, 0)\n","            image_tensor = tf.get_default_graph().get_tensor_by_name('image_tensor:0')\n","\n","            # Run inference\n","            output_dict = sess.run(tensor_dict,\n","                                   feed_dict={image_tensor: np.expand_dims(image, 0)})\n","\n","            # all outputs are float32 numpy arrays, so convert types as appropriate\n","            output_dict['num_detections'] = int(\n","                output_dict['num_detections'][0])\n","            output_dict['detection_classes'] = output_dict[\n","                'detection_classes'][0].astype(np.uint8)\n","            output_dict['detection_boxes'] = output_dict['detection_boxes'][0]\n","            output_dict['detection_scores'] = output_dict['detection_scores'][0]\n","            if 'detection_masks' in output_dict:\n","                output_dict['detection_masks'] = output_dict['detection_masks'][0]\n","    return 
output_dict"],"execution_count":27,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/capstone/models/research/object_detection\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"sJ9tS6vSFXUC","colab_type":"code","outputId":"779c4f5f-11c0-4949-9d07-83adfbf0fd66","executionInfo":{"status":"ok","timestamp":1592062702405,"user_tz":-540,"elapsed":154458,"user":{"displayName":"‍신아형[학생](소프트웨어융합대학 컴퓨터공학과)","photoUrl":"","userId":"02057930622140727302"}},"colab":{"base_uri":"https://localhost:8080/","height":1000,"output_embedded_package_id":"1LtiueD9JiTky7TMsQyJOaxaSlqrWkyiK"}},"source":["%matplotlib inline\n","from PIL import Image\n","\n","print('Running inferences on %s' % TEST_IMAGE_PATHS)\n","for image_path in TEST_IMAGE_PATHS:\n","    image = Image.open(image_path).convert('RGB')\n","    # the array based representation of the image will be used later in order to prepare the\n","    # result image with boxes and labels on it.\n","    image_np = load_image_into_numpy_array(image)\n","    # Expand dimensions since the model expects images to have shape: [1, None, None, 3]\n","    image_np_expanded = np.expand_dims(image_np, axis=0)\n","    # Actual detection.\n","    output_dict = run_inference_for_single_image(image_np, detection_graph)\n","    # Visualization of the results of a detection.\n","    vis_util.visualize_boxes_and_labels_on_image_array(\n","        image_np,\n","        output_dict['detection_boxes'],\n","        output_dict['detection_classes'],\n","        output_dict['detection_scores'],\n","        category_index,\n","        instance_masks=output_dict.get('detection_masks'),\n","        use_normalized_coordinates=True,\n","        line_thickness=2)\n","    plt.figure(figsize=IMAGE_SIZE)\n","    plt.imshow(image_np)\n","    \n","    lst=image_path.split('/')\n","    image_name=lst[-1]\n","    print(image_name)\n","    path='/content/gdrive/My Drive/capstone/result/'+image_name\n","    
plt.savefig(path)"],"execution_count":28,"outputs":[{"output_type":"display_data","data":{"text/plain":"Output hidden; open in https://colab.research.google.com to view."},"metadata":{}}]}]}