2020-1-capstone-design2 / 2016104167
Authored by 조현아 on 2020-04-02 12:42:25 +0900
Commit 9c71da6c079245fb02023763ac0d6ea5c1d6f839 (9c71da6c), 1 parent: 75bcfcee
train max_step_6500
Showing 2 changed files with 455 additions and 6 deletions
code/FAA2/FAA2.ipynb
code/FAA2/utils.py
code/FAA2/FAA2.ipynb 0 → 100644 (new file)
Notebook metadata: nbformat 4 (nbformat_minor 0); Colab notebook "FAA2.ipynb" (provenance: [], collapsed_sections: [], toc_visible: true); kernelspec: python3 ("Python 3"); accelerator: GPU.
Cell 1 (code, id sWjZQ8LCWcZv):
    from google.colab import drive
    drive.mount('/content/drive')
Output (stdout):
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
Cell 2 (code, id 3arNqMB_Wgbx):
    !git clone http://khuhub.khu.ac.kr/2020-1-capstone-design2/2016104167.git
Output (stdout):
fatal: destination path '2016104167' already exists and is not an empty directory.
Cell 3 (code, id ISXM-edL-lGF):
    %cd '2016104167/code/FAA2/'
Output (stdout):
/content/2016104167/code/FAA2
Cell 4 (code, id 43zJwd05_Tst):
    !python -m pip install -r "requirements.txt"
Output (stdout):
Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 1)) (0.16.0)
Requirement already satisfied: tb-nightly in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 2)) (2.3.0a20200331)
Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 3)) (0.5.0)
Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 4)) (1.4.0)
Requirement already satisfied: hyperopt in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 5)) (0.1.2)
Requirement already satisfied: pillow==6.2.1 in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 6)) (6.2.1)
Requirement already satisfied: natsort in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 7)) (5.5.0)
Requirement already satisfied: fire in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 8)) (0.3.0)
Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.0.0)
Requirement already satisfied: numpy>=1.12.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.18.2)
Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (2.21.0)
Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (46.0.0)
Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (3.2.1)
Requirement already satisfied: protobuf>=3.6.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (3.10.0)
Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (0.4.1)
Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.7.2)
Requirement already satisfied: grpcio>=1.24.3 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.27.2)
Requirement already satisfied: wheel>=0.26; python_version >= "3" in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (0.34.2)
Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.12.0)
Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.6.0.post2)
Requirement already satisfied: absl-py>=0.4 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (0.9.0)
Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (1.4.1)
Requirement already satisfied: pymongo in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (3.10.1)
Requirement already satisfied: networkx in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (2.4)
Requirement already satisfied: tqdm in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (4.38.0)
Requirement already satisfied: termcolor in /usr/local/lib/python3.6/dist-packages (from fire->-r requirements.txt (line 8)) (1.1.0)
Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (3.0.4)
Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (1.24.3)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (2019.11.28)
Requirement already satisfied: idna<2.9,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (2.8)
Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tb-nightly->-r requirements.txt (line 2)) (1.3.0)
Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (0.2.8)
Requirement already satisfied: rsa<4.1,>=3.1.4 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (4.0)
Requirement already satisfied: cachetools<3.2,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (3.1.1)
Requirement already satisfied: decorator>=4.3.0 in /usr/local/lib/python3.6/dist-packages (from networkx->hyperopt->-r requirements.txt (line 5)) (4.4.2)
Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tb-nightly->-r requirements.txt (line 2)) (3.1.0)
Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.6/dist-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (0.4.8)
Cell 5 (code, id 16kGbCYwfhYF, no output):
    # !pip3 install http://download.pytorch.org/whl/cu80/torch-0.3.0.post4-cp36-cp36m-linux_x86_64.whl
    # !pip3 install torchvision
Cell 6 (code, id hofwjBN3ZY_h, no output):
    use_cuda = True
Cell 7 (code, id 0h78dEdg_Jsg, no output):
    # try CIFAR10
    #!python "train.py" --seed=24 --scale=3 --optimizer=sgd --fast_auto_augment=True --use_cuda=True --network=ResNet50
Cell 8 (code, id nz8P9CpzES4L):
    # BraTS, grayResNet2
    !python "train.py" --use_cuda=True --network=resnet50 --dataset=BraTS --optimizer=adam --fast_auto_augment=True
Output (stdout):
[+] Parse arguments
Args(augment_path=None, batch_size=128, dataset='BraTS', fast_auto_augment=True, learning_rate=0.0001, max_step=10000, network='resnet50', num_workers=4, optimizer='adam', print_step=500, scheduler='exp', seed=None, start_step=0, use_cuda=True, val_step=500)

[+] Create log dir
2020-04-01 05:45:32.118038: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library libcudart.so.10.1

[+] Create network
BaseNet(
  (first): Sequential(
    (0): Conv2d(1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  )
  (after): Sequential(
    (0): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (1): ReLU(inplace=True)
    (2): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
    (3): Sequential(
      (0): Bottleneck(
        (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
        (downsample): Sequential(
          (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
          (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (1): Bottleneck(
        (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (2): Bottleneck(
        (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
    )
    (4): Sequential(
      (0): Bottleneck(
        (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
        (downsample): Sequential(
          (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
          (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (1): Bottleneck(
        (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (2): Bottleneck(
        (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (3): Bottleneck(
        (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
    )
    (5): Sequential(
      (0): Bottleneck(
        (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
        (downsample): Sequential(
          (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
          (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (1): Bottleneck(
        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (2): Bottleneck(
        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (3): Bottleneck(
        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (4): Bottleneck(
        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (5): Bottleneck(
        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
    )
    (6): Sequential(
      (0): Bottleneck(
        (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
        (downsample): Sequential(
          (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
          (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        )
      )
      (1): Bottleneck(
        (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
      (2): Bottleneck(
        (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (relu): ReLU(inplace=True)
      )
    )
    (7): AdaptiveAvgPool2d(output_size=(1, 1))
  )
  (fc): Linear(in_features=2048, out_features=1000, bias=True)
)

[+] Load dataset
[+] Child 0 training started (GPU: 0)

[+] Training step: 0/10000    Elapsed time: 0.24min    Learning rate: 9.999283e-05    Device name: Tesla P100-PCIE-16GB
  Acc@1 : 0.000%
  Acc@5 : 0.000%
  Loss : 7.242412567138672

[+] Training step: 500/10000    Elapsed time: 9.44min    Learning rate: 9.647145853624023e-05    Device name: Tesla P100-PCIE-16GB
  Acc@1 : 100.000%
  Acc@5 : 100.000%
  Loss : 0.00023103877902030945
Cell 9 (code, id 3iBnXLMsES7H): empty source, no output.
Cell 10 (code, id Wc8cguWUhp9l): empty source, no output.
(No newline at end of file.)
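The run in Cell 8 picked up the parse_args defaults (its log shows max_step=10000), while this commit lowers the default to 6,500 steps. A sketch of the same invocation with the step count pinned explicitly, assuming train.py accepts --max_step as a keyword flag in the same way as the flags used above, would be:

    !python "train.py" --use_cuda=True --network=resnet50 --dataset=BraTS --optimizer=adam --fast_auto_augment=True --max_step=6500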
code/FAA2/utils.py
@@ -104,20 +104,20 @@ def dict_to_namedtuple(d):
 def parse_args(kwargs):
     # combine with default args
-    kwargs['dataset'] = kwargs['dataset'] if 'dataset' in kwargs else 'cifar10'
-    kwargs['network'] = kwargs['network'] if 'network' in kwargs else 'resnet_cifar10'
+    kwargs['dataset'] = kwargs['dataset'] if 'dataset' in kwargs else 'BraTS'
+    kwargs['network'] = kwargs['network'] if 'network' in kwargs else 'resnet50'
     kwargs['optimizer'] = kwargs['optimizer'] if 'optimizer' in kwargs else 'adam'
-    kwargs['learning_rate'] = kwargs['learning_rate'] if 'learning_rate' in kwargs else 0.1
+    kwargs['learning_rate'] = kwargs['learning_rate'] if 'learning_rate' in kwargs else 0.0001
     kwargs['seed'] = kwargs['seed'] if 'seed' in kwargs else None
     kwargs['use_cuda'] = kwargs['use_cuda'] if 'use_cuda' in kwargs else True
     kwargs['use_cuda'] = kwargs['use_cuda'] and torch.cuda.is_available()
     kwargs['num_workers'] = kwargs['num_workers'] if 'num_workers' in kwargs else 4
-    kwargs['print_step'] = kwargs['print_step'] if 'print_step' in kwargs else 2000
-    kwargs['val_step'] = kwargs['val_step'] if 'val_step' in kwargs else 2000
+    kwargs['print_step'] = kwargs['print_step'] if 'print_step' in kwargs else 500
+    kwargs['val_step'] = kwargs['val_step'] if 'val_step' in kwargs else 500
     kwargs['scheduler'] = kwargs['scheduler'] if 'scheduler' in kwargs else 'exp'
     kwargs['batch_size'] = kwargs['batch_size'] if 'batch_size' in kwargs else 128
     kwargs['start_step'] = kwargs['start_step'] if 'start_step' in kwargs else 0
-    kwargs['max_step'] = kwargs['max_step'] if 'max_step' in kwargs else 64000
+    kwargs['max_step'] = kwargs['max_step'] if 'max_step' in kwargs else 6500
     kwargs['fast_auto_augment'] = kwargs['fast_auto_augment'] if 'fast_auto_augment' in kwargs else False
     kwargs['augment_path'] = kwargs['augment_path'] if 'augment_path' in kwargs else None
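The hunk above repeats the same "kwargs[key] if key in kwargs else default" guard for every argument. A minimal sketch of equivalent default-merging logic, for illustration only (the DEFAULTS dict and the merge_defaults name are not part of this repository), with values matching the defaults this commit sets:

    import torch

    # Defaults mirroring the values parse_args assigns after this commit (utils.py).
    DEFAULTS = {
        'dataset': 'BraTS', 'network': 'resnet50', 'optimizer': 'adam',
        'learning_rate': 0.0001, 'seed': None, 'use_cuda': True,
        'num_workers': 4, 'print_step': 500, 'val_step': 500,
        'scheduler': 'exp', 'batch_size': 128, 'start_step': 0,
        'max_step': 6500, 'fast_auto_augment': False, 'augment_path': None,
    }

    def merge_defaults(kwargs):
        # Fill in any argument the caller did not supply, then gate CUDA on availability.
        for key, value in DEFAULTS.items():
            kwargs.setdefault(key, value)
        kwargs['use_cuda'] = kwargs['use_cuda'] and torch.cuda.is_available()
        return kwargs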