In [3]:
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
from torch.autograd import Variable
import numpy as np
import torchvision
from torchvision import datasets, models, transforms
import matplotlib.pyplot as plt
import time
import os
import copy

plt.ion()
  • This post is about how to train a network using transfer learning
  • Since it is rarely possible to gather a dataset of sufficient size, it is common to start from a network pretrained on a large dataset

  • Finetuning the convnet: instead of random initialization, initialize the network with weights trained on the ImageNet 1000-class dataset, then train exactly as usual

  • Fixed feature extractor: freeze the weights of the entire network except the final fully connected layer; that last layer is replaced with a new one and is the only layer trained (see the sketch after this list)
  • We will build a model that classifies ants and bees. There are about 120 training images per class and 75 validation images. That is a very small dataset, but transfer learning lets us build a reasonable model
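
For the fixed-feature-extractor variant, freezing comes down to turning off gradients for every pretrained parameter before swapping in a new head. A minimal sketch, assuming the torchvision resnet18 used later in this post:

model_conv = models.resnet18(pretrained=True)
for param in model_conv.parameters():
    param.requires_grad = False  # freeze all pretrained weights

# A freshly constructed layer has requires_grad=True by default,
# so only the new fc layer will receive gradient updates.
model_conv.fc = nn.Linear(model_conv.fc.in_features, 2)

# Pass only the trainable parameters to the optimizer.
optimizer_conv = optim.SGD(model_conv.fc.parameters(), lr=0.001, momentum=0.9)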

In [6]:
data_transforms = {
    'train': transforms.Compose([
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
    'val': transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
}
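
As a quick sanity check, each pipeline can be applied to a single PIL image; the train pipeline should always yield a 3x224x224 tensor. A short sketch (the file path below is a hypothetical example):

from PIL import Image

img = Image.open('hymenoptera_data/train/ants/some_ant.jpg')  # hypothetical path
x = data_transforms['train'](img)
print(x.size())  # torch.Size([3, 224, 224])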



In [7]:
data_dir = 'hymenoptera_data'
image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x),
                                          data_transforms[x])
                  for x in ['train', 'val']}
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=4,
                                             shuffle=True, num_workers=4)
              for x in ['train', 'val']}
dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'val']}
class_names = image_datasets['train'].classes

use_gpu = torch.cuda.is_available()

Visualization


In [8]:
def imshow(inp, title=None):
    """Imshow for Tensor."""
    inp = inp.numpy().transpose((1, 2, 0)) # convert the tensor to a numpy array and transpose CHW -> HWC
    mean = np.array([0.485, 0.456, 0.406])
    std = np.array([0.229, 0.224, 0.225])
    inp = std * inp + mean
    inp = np.clip(inp, 0, 1)
    plt.imshow(inp)
    if title is not None:
        plt.title(title)
    plt.pause(0.001)  # pause a bit so that plots are updated

In [9]:
# Get a batch of training data
# dataloader objects are iterable
inputs, classes = next(iter(dataloaders['train']))

In [12]:
# Make a grid from batch
'''
torchvision.utils.make_grid(tensor, nrow=8, padding=2, normalize=False, range=None, scale_each=False, pad_value=0)
Make a grid of images.

Args:
    tensor (Tensor or list): 4D mini-batch Tensor of shape (B x C x H x W)
        or a list of images all of the same size.
    nrow (int, optional): Number of images displayed in each row of the grid.
        The final grid size is (B / nrow, nrow). Default is 8.
    padding (int, optional): amount of padding. Default is 2.
    normalize (bool, optional): If True, shift the image to the range (0, 1),
        by subtracting the minimum and dividing by the maximum pixel value.
    range (tuple, optional): tuple (min, max) where min and max are numbers,
        then these numbers are used to normalize the image. By default, min and max
        are computed from the tensor.
    scale_each (bool, optional): If True, scale each image in the batch of
        images separately rather than the (min, max) over all images.
    pad_value (float, optional): Value for the padded pixels.
'''
out = torchvision.utils.make_grid(inputs)

imshow(out, title=[class_names[x] for x in classes])



In [13]:
class_names


Out[13]:
['ants', 'bees']

In [14]:
classes


Out[14]:
 1
 0
 0
 1
[torch.LongTensor of size 4]
  • Schedule the learning rate (a sketch follows this list)
  • Keep a copy of the best-performing model
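
train_model below expects a scheduler from torch.optim.lr_scheduler. A minimal sketch of how StepLR decays the learning rate when step() is called once per epoch (the model and SGD settings here are illustrative assumptions):

net = models.resnet18(pretrained=False)  # stand-in for the model being trained
optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.9)
scheduler = lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)  # lr *= 0.1 every 7 epochs

for epoch in range(20):
    scheduler.step()  # called at the start of each epoch, as in train_model below
    print(epoch, optimizer.param_groups[0]['lr'])
    # epochs 0-6 -> 0.01, epochs 7-13 -> 0.001, epochs 14-19 -> 0.0001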

In [34]:
def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
    since = time.time()
    
    best_model_wts = copy.deepcopy(model.state_dict())
    best_acc = 0.0
    
    for epoch in range(num_epochs):
        print('Epoch {}/{}'.format(epoch, num_epochs-1))
        print('-'*10)
        
        for phase in ['train', 'val']:
            if phase == 'train':
                scheduler.step()
                model.train(True) # train
            else:
                model.eval()
                # model.train(False) # eval
            running_loss = 0.0
            running_corrects = 0
        
            for data in dataloaders[phase]:
                inputs, labels = data
                
                if use_gpu:
                    inputs = Variable(inputs.cuda())
                    labels = Variable(labels.cuda())
                else:
                    inputs, labels = Variable(inputs), Variable(labels)
                optimizer.zero_grad()
                
                outputs = model(inputs)
                _, preds = torch.max(outputs.data, 1) #argmax
                loss = criterion(outputs, labels)
                
                if phase == 'train':
                    loss.backward()
                    optimizer.step()
                    
                running_loss += loss.data[0] * inputs.size(0)
                running_corrects += torch.sum(preds == labels.data)
                
            epoch_loss = running_loss / dataset_sizes[phase]
            epoch_acc = running_corrects / dataset_sizes[phase]
            
            print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))
            
            if phase == 'val' and epoch_acc > best_acc:
                best_acc = epoch_acc
                best_model_wts = copy.deepcopy(model.state_dict())
    
    time_elapsed = time.time() - since
    print('Training complete in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))
    print('Best val Acc: {:.4f}'.format(best_acc))

    # load best model weights
    model.load_state_dict(best_model_wts)
    return model

In [18]:
def visualize_model(model, num_images=6):
    images_so_far = 0
    fig = plt.figure()
    model.eval()  # use eval mode so BatchNorm/Dropout behave deterministically

    for i, data in enumerate(dataloaders['val']):
        inputs, labels = data
        if use_gpu:
            inputs, labels = Variable(inputs.cuda()), Variable(labels.cuda())
        else:
            inputs, labels = Variable(inputs), Variable(labels)
        
        outputs = model(inputs)
        _, preds = torch.max(outputs.data, 1)
        
        for j in range(inputs.size()[0]):
            images_so_far += 1
            ax = plt.subplot(num_images//2, 2, images_so_far)
            ax.axis('off')
            ax.set_title('predicted: {}'.format(class_names[preds[j]]))
            imshow(inputs.cpu().data[j])

            if images_so_far == num_images:
                return

Fine-tuning


In [19]:
model_ft = models.resnet18(pretrained=True)


Downloading: "https://download.pytorch.org/models/resnet18-5c106cde.pth" to /Users/kyle/.torch/models/resnet18-5c106cde.pth
100%|██████████| 46827520/46827520 [00:04<00:00, 10896080.35it/s]

In [20]:
model_ft


Out[20]:
ResNet(
  (conv1): Conv2d (3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
  (relu): ReLU(inplace)
  (maxpool): MaxPool2d(kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1))
  (layer1): Sequential(
    (0): BasicBlock(
      (conv1): Conv2d (64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
    )
    (1): BasicBlock(
      (conv1): Conv2d (64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True)
    )
  )
  (layer2): Sequential(
    (0): BasicBlock(
      (conv1): Conv2d (64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
      (downsample): Sequential(
        (0): Conv2d (64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
      )
    )
    (1): BasicBlock(
      (conv1): Conv2d (128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
    )
  )
  (layer3): Sequential(
    (0): BasicBlock(
      (conv1): Conv2d (128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
      (downsample): Sequential(
        (0): Conv2d (128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
      )
    )
    (1): BasicBlock(
      (conv1): Conv2d (256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
    )
  )
  (layer4): Sequential(
    (0): BasicBlock(
      (conv1): Conv2d (256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True)
      (downsample): Sequential(
        (0): Conv2d (256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True)
      )
    )
    (1): BasicBlock(
      (conv1): Conv2d (512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True)
      (relu): ReLU(inplace)
      (conv2): Conv2d (512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True)
    )
  )
  (avgpool): AvgPool2d(kernel_size=7, stride=1, padding=0, ceil_mode=False, count_include_pad=True)
  (fc): Linear(in_features=512, out_features=1000)
)

In [22]:
type(model_ft)


Out[22]:
torchvision.models.resnet.ResNet

In [25]:
dir(model_ft)


Out[25]:
['__call__',
 '__class__',
 '__delattr__',
 '__dict__',
 '__dir__',
 '__doc__',
 '__eq__',
 '__format__',
 '__ge__',
 '__getattr__',
 '__getattribute__',
 '__gt__',
 '__hash__',
 '__init__',
 '__init_subclass__',
 '__le__',
 '__lt__',
 '__module__',
 '__ne__',
 '__new__',
 '__reduce__',
 '__reduce_ex__',
 '__repr__',
 '__setattr__',
 '__setstate__',
 '__sizeof__',
 '__str__',
 '__subclasshook__',
 '__weakref__',
 '_all_buffers',
 '_apply',
 '_backend',
 '_backward_hooks',
 '_buffers',
 '_forward_hooks',
 '_forward_pre_hooks',
 '_make_layer',
 '_modules',
 '_parameters',
 'add_module',
 'apply',
 'avgpool',
 'bn1',
 'children',
 'conv1',
 'cpu',
 'cuda',
 'double',
 'dump_patches',
 'eval',
 'fc',
 'float',
 'forward',
 'half',
 'inplanes',
 'layer1',
 'layer2',
 'layer3',
 'layer4',
 'load_state_dict',
 'maxpool',
 'modules',
 'named_children',
 'named_modules',
 'named_parameters',
 'parameters',
 'register_backward_hook',
 'register_buffer',
 'register_forward_hook',
 'register_forward_pre_hook',
 'register_parameter',
 'relu',
 'share_memory',
 'state_dict',
 'train',
 'training',
 'type',
 'zero_grad']

In [24]:
num_ftrs = model_ft.fc.in_features

In [26]:
num_ftrs


Out[26]:
512

In [27]:
model_ft.fc = nn.Linear(num_ftrs, 2)

In [28]:
if use_gpu:
    model_ft = model_ft.cuda()
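
With the final layer replaced, only the loss, optimizer, and scheduler remain before calling train_model. A minimal sketch (the hyperparameters lr=0.001, momentum=0.9, step_size=7, gamma=0.1 are common choices, not values fixed anywhere above):

criterion = nn.CrossEntropyLoss()

# Fine-tuning: all parameters are optimized, not just the new fc layer.
optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9)

# Decay the learning rate by a factor of 0.1 every 7 epochs.
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)

model_ft = train_model(model_ft, criterion, optimizer_ft, exp_lr_scheduler,
                       num_epochs=25)
visualize_model(model_ft)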

In [29]:
model_ft.parameters()


Out[29]:
<generator object Module.parameters at 0x1060bea98>

In [30]:
[i for i in model_ft.parameters()]


Out[30]:
[Parameter containing: ... [torch.FloatTensor of size 64x3x7x7],
 Parameter containing: ... [torch.FloatTensor of size 64],
 Parameter containing: ... [torch.FloatTensor of size 64],
 Parameter containing: ... [torch.FloatTensor of size 64x64x3x3],
 Parameter containing: ... [torch.FloatTensor of size 64],
 Parameter containing: ... [torch.FloatTensor of size 64],
 ...
 Parameter containing: ... [torch.FloatTensor of size 128x64x3x3],
 Parameter containing: ... [torch.FloatTensor of size 128],
 Parameter containing: ... [torch.FloatTensor of size 128],
 ...]

(The raw values of every weight and bias tensor are elided here; the original output runs for thousands of lines.)
  -1.1634e-02 -2.3650e-02 -7.8005e-03
   8.6452e-03  7.8958e-03 -1.8926e-02
       ⋮  
 
 (126, 0 ,.,.) = 
   3.2894e-02  2.9690e-02  1.1071e-02
   3.8989e-02 -8.9897e-03  2.2632e-02
   7.8374e-03 -2.7959e-02 -2.3005e-02
 
 (126, 1 ,.,.) = 
   1.3667e-02  2.2886e-02 -1.8989e-02
   2.7104e-03  1.1235e-02  7.4223e-03
   2.1089e-02  4.3557e-02  1.0752e-02
 
 (126, 2 ,.,.) = 
  -2.3662e-02  2.2110e-02  4.3471e-04
  -3.0925e-02  6.0868e-02  1.6691e-02
  -8.8467e-02 -8.3442e-02 -3.4247e-02
     ... 
 
 (126,125,.,.) = 
  -7.3418e-03 -1.9690e-02  1.7969e-03
   7.2727e-03 -3.4322e-02 -2.4270e-02
  -1.1512e-02 -6.0470e-02 -5.5070e-02
 
 (126,126,.,.) = 
   3.0219e-03  2.6285e-03  1.7110e-02
  -1.3418e-02 -8.5859e-03  9.0284e-03
   1.9504e-02  9.4355e-03  4.5180e-03
 
 (126,127,.,.) = 
   1.3674e-03  7.6213e-04  1.1925e-02
  -2.3910e-03 -1.0733e-02  1.2625e-02
  -5.0613e-03 -5.7724e-03 -1.4643e-02
       ⋮  
 
 (127, 0 ,.,.) = 
  -7.4213e-03  1.1754e-02 -4.2728e-02
   1.6309e-03 -4.5691e-02 -1.3976e-01
  -6.5419e-03 -2.0547e-03 -4.8392e-02
 
 (127, 1 ,.,.) = 
   7.5053e-03  5.2659e-02  3.8849e-02
  -1.2484e-02  8.4685e-02  6.2233e-03
   1.3136e-03 -1.9656e-02 -8.2167e-02
 
 (127, 2 ,.,.) = 
  -2.4916e-02  1.6551e-02  1.6914e-02
   8.6507e-03  2.1444e-02  1.1694e-02
  -9.0502e-04  3.0596e-02  1.3600e-02
     ... 
 
 (127,125,.,.) = 
  -7.8114e-03  2.2029e-02 -1.7545e-02
  -7.5889e-03 -2.1149e-02 -3.6984e-03
   1.2622e-02 -2.0709e-02 -5.3862e-03
 
 (127,126,.,.) = 
   3.0152e-02 -8.2268e-03 -6.4910e-02
  -2.3752e-02 -9.5375e-02 -5.3019e-02
  -1.6835e-02 -1.1071e-02  9.9055e-04
 
 (127,127,.,.) = 
  -2.4533e-02 -8.4685e-02  2.5065e-02
   1.0639e-02  3.8693e-02  1.4004e-01
   1.5497e-02 -9.5081e-03  4.0948e-03
 [torch.FloatTensor of size 128x128x3x3], Parameter containing:
  0.1454
  0.3270
  0.3113
  0.2538
  0.4086
  0.3937
  0.4400
  0.3108
  0.3406
  0.2168
  0.2170
  0.3857
  0.1971
  0.2692
  0.1663
  0.2454
  0.3232
  0.3686
  0.3893
  0.3264
  0.3875
  0.4707
  0.1958
  0.4717
  0.1673
  0.3938
  0.3044
  0.1929
  0.2175
  0.2119
  0.4230
  0.3683
  0.2455
  0.2229
  0.3370
  0.3229
  0.2688
  0.3557
  0.2581
  0.4031
  0.4492
  0.3642
  0.2599
  0.1881
  0.1359
  0.2958
  0.1913
  0.3065
  0.3981
  0.4102
  0.1874
  0.4516
  0.3340
  0.1628
  0.3599
  0.1624
  0.2886
  0.1358
  0.4491
  0.2694
  0.4823
  0.3393
  0.4764
  0.3155
  0.6005
  0.4654
  0.5264
  0.2991
  0.2992
  0.4621
  0.2614
  0.4247
  0.4662
  0.4249
  0.3345
  0.2655
  0.4048
  0.3605
  0.1782
  0.3833
  0.2823
  0.3843
  0.3307
  0.2151
  0.3317
  0.1458
  0.2771
  0.4917
  0.3199
  0.4222
  0.1559
  0.4884
  0.3267
  0.3440
  0.1608
  0.4855
  0.2677
  0.1616
  0.3221
  0.4243
  0.3661
  0.1893
  0.3400
  0.3648
  0.1779
  0.3544
  0.2852
  0.2437
  0.4472
  0.3011
  0.3997
  0.6173
  0.2794
  0.4867
  0.1502
  0.6021
  0.3604
  0.4696
  0.3711
  0.2388
  0.5347
  0.1509
  0.3213
  0.4394
  0.3229
  0.4329
  0.1489
  0.3702
 [torch.FloatTensor of size 128], Parameter containing:
  0.0246
  0.0593
  0.1347
 -0.1089
 -0.0470
 -0.1359
 -0.0550
  0.0509
 -0.0613
  0.0916
  0.0031
 -0.0274
 -0.0539
  0.0177
  0.0432
  0.0074
  0.0548
 -0.0321
 -0.0224
  0.0142
 -0.2150
 -0.1160
  0.0486
 -0.1141
  0.1066
  0.0355
  0.0140
  0.0177
  0.0781
  0.1331
  0.0139
  0.0447
  0.1063
  0.0528
 -0.0539
 -0.1160
  0.1055
 -0.1591
  0.0100
  0.1197
  0.0170
  0.0929
 -0.0675
  0.0987
  0.1034
  0.0501
  0.0297
  0.0281
 -0.0075
 -0.0577
 -0.0144
 -0.1640
  0.1255
  0.0817
  0.0635
  0.0936
  0.0213
  0.0486
 -0.1174
  0.0237
 -0.2177
  0.0099
 -0.1883
  0.0467
 -0.0829
  0.0585
 -0.0306
  0.0509
  0.0541
 -0.1671
  0.0115
 -0.0302
 -0.1393
  0.0115
  0.0428
  0.1189
 -0.1289
  0.0479
  0.0474
 -0.0625
  0.0009
 -0.0144
  0.0909
  0.1342
 -0.0338
  0.0560
  0.0848
 -0.0467
  0.0228
 -0.0097
  0.1360
 -0.2625
  0.0088
 -0.0553
  0.0383
 -0.0720
  0.0907
  0.1612
 -0.1076
  0.1011
 -0.0519
  0.0838
 -0.0704
 -0.0806
 -0.0243
  0.0533
  0.1277
  0.1403
 -0.0593
 -0.0639
 -0.0766
 -0.1163
  0.0661
 -0.1644
  0.0422
 -0.2786
 -0.1006
 -0.0696
 -0.0761
  0.0371
 -0.0247
  0.0916
 -0.0200
 -0.0176
  0.0298
 -0.0373
  0.0466
 -0.1371
 [torch.FloatTensor of size 128], Parameter containing:
 ( 0 , 0 ,.,.) = 
   1.5916e-02
 
 ( 0 , 1 ,.,.) = 
  -3.1090e-01
 
 ( 0 , 2 ,.,.) = 
   1.2615e-02
     ... 
 
 ( 0 ,61 ,.,.) = 
  -1.6723e-01
 
 ( 0 ,62 ,.,.) = 
   1.2692e-02
 
 ( 0 ,63 ,.,.) = 
   1.3152e-02
       ⋮  
 
 ( 1 , 0 ,.,.) = 
   3.5526e-03
 
 ( 1 , 1 ,.,.) = 
  -1.0868e-03
 
 ( 1 , 2 ,.,.) = 
  -8.2883e-03
     ... 
 
 ( 1 ,61 ,.,.) = 
  -2.3444e-02
 
 ( 1 ,62 ,.,.) = 
  -7.5592e-02
 
 ( 1 ,63 ,.,.) = 
  -1.2622e-02
       ⋮  
 
 ( 2 , 0 ,.,.) = 
  -4.1898e-02
 
 ( 2 , 1 ,.,.) = 
   7.9478e-03
 
 ( 2 , 2 ,.,.) = 
  -1.6623e-01
     ... 
 
 ( 2 ,61 ,.,.) = 
   3.1887e-02
 
 ( 2 ,62 ,.,.) = 
  -1.8766e-02
 
 ( 2 ,63 ,.,.) = 
   6.4507e-02
 ...     
       ⋮  
 
 (125, 0 ,.,.) = 
  -2.8725e-02
 
 (125, 1 ,.,.) = 
   4.7026e-02
 
 (125, 2 ,.,.) = 
  -5.2251e-02
     ... 
 
 (125,61 ,.,.) = 
  -4.7365e-02
 
 (125,62 ,.,.) = 
   5.8639e-02
 
 (125,63 ,.,.) = 
   5.8808e-02
       ⋮  
 
 (126, 0 ,.,.) = 
  -7.7884e-03
 
 (126, 1 ,.,.) = 
  -2.0288e-02
 
 (126, 2 ,.,.) = 
   5.6392e-02
     ... 
 
 (126,61 ,.,.) = 
   7.8023e-01
 
 (126,62 ,.,.) = 
  -2.2917e-03
 
 (126,63 ,.,.) = 
  -2.5941e-02
       ⋮  
 
 (127, 0 ,.,.) = 
  -2.8316e-02
 
 (127, 1 ,.,.) = 
  -1.3194e-02
 
 (127, 2 ,.,.) = 
  -5.1356e-02
     ... 
 
 (127,61 ,.,.) = 
   2.3552e-02
 
 (127,62 ,.,.) = 
  -6.7667e-02
 
 (127,63 ,.,.) = 
   2.6754e-02
 [torch.FloatTensor of size 128x64x1x1], Parameter containing:
  0.3334
  0.0581
  0.0715
  0.3442
  0.1756
  0.1509
  0.1568
  0.3100
  0.1927
  0.1516
  0.3044
  0.2238
  0.3706
  0.1739
  0.3051
  0.2610
  0.1575
  0.2015
  0.2933
  0.1010
  0.5871
  0.0676
  0.2499
  0.0929
  0.2443
  0.0495
  0.2449
  0.2750
  0.3071
  0.3025
  0.1818
  0.0688
  0.2223
  0.3766
  0.4661
  0.3284
  0.1035
  0.3400
  0.2325
  0.1514
  0.1753
  0.2269
  0.2606
  0.1831
  0.2894
  0.2590
  0.2208
  0.1399
  0.0643
  0.2833
  0.3451
  0.2017
  0.0696
  0.2722
  0.1127
  0.2917
  0.2358
  0.2703
  0.0911
  0.2591
  0.1302
  0.2261
  0.1967
  0.0539
  0.0697
  0.0524
  0.1050
  0.0861
  0.1173
  0.0957
  0.1862
  0.1642
  0.1336
  0.1065
  0.1312
  0.0888
  0.0793
  0.0475
  0.3049
  0.2325
  0.2908
  0.1292
  0.0778
  0.2263
  0.2379
  0.3405
  0.0914
  0.1936
  0.1223
  0.1400
  0.2953
  0.2360
  0.1681
  0.1338
  0.2666
  0.1495
  0.0761
  0.1674
  0.1784
  0.1720
  0.2318
  0.3753
  0.2103
  0.1922
  0.4002
  0.1718
  0.0593
  0.0742
  0.0686
  0.1931
  0.1386
  0.1111
  0.3055
  0.1205
  0.3443
  0.1633
  0.3673
  0.1534
  0.0742
  0.2088
  0.0394
  0.2594
  0.1385
 -0.0051
  0.1905
  0.1275
  0.3071
  0.1682
 [torch.FloatTensor of size 128], Parameter containing:
  0.0246
  0.0593
  0.1347
 -0.1089
 -0.0470
 -0.1359
 -0.0550
  0.0509
 -0.0613
  0.0916
  0.0031
 -0.0274
 -0.0539
  0.0177
  0.0432
  0.0074
  0.0548
 -0.0321
 -0.0224
  0.0142
 -0.2150
 -0.1160
  0.0486
 -0.1141
  0.1066
  0.0355
  0.0140
  0.0177
  0.0781
  0.1331
  0.0139
  0.0447
  0.1063
  0.0528
 -0.0539
 -0.1160
  0.1055
 -0.1591
  0.0100
  0.1197
  0.0170
  0.0929
 -0.0675
  0.0987
  0.1034
  0.0501
  0.0297
  0.0281
 -0.0075
 -0.0577
 -0.0144
 -0.1640
  0.1255
  0.0817
  0.0635
  0.0936
  0.0213
  0.0486
 -0.1174
  0.0237
 -0.2177
  0.0099
 -0.1883
  0.0467
 -0.0829
  0.0585
 -0.0306
  0.0509
  0.0541
 -0.1671
  0.0115
 -0.0302
 -0.1393
  0.0115
  0.0428
  0.1189
 -0.1289
  0.0479
  0.0474
 -0.0625
  0.0009
 -0.0144
  0.0909
  0.1342
 -0.0338
  0.0560
  0.0848
 -0.0467
  0.0228
 -0.0097
  0.1360
 -0.2625
  0.0088
 -0.0553
  0.0383
 -0.0720
  0.0907
  0.1612
 -0.1076
  0.1011
 -0.0519
  0.0838
 -0.0704
 -0.0806
 -0.0243
  0.0533
  0.1277
  0.1403
 -0.0593
 -0.0639
 -0.0766
 -0.1163
  0.0661
 -0.1644
  0.0422
 -0.2786
 -0.1006
 -0.0696
 -0.0761
  0.0371
 -0.0247
  0.0916
 -0.0200
 -0.0176
  0.0298
 -0.0373
  0.0466
 -0.1371
 [torch.FloatTensor of size 128], Parameter containing:
 ( 0 , 0 ,.,.) = 
  -9.9023e-04 -7.7429e-03 -7.9740e-03
   2.4844e-02  1.8642e-03  5.8352e-03
   9.5089e-03 -1.6476e-02  3.9157e-03
 
 ( 0 , 1 ,.,.) = 
  -2.1488e-02 -1.2330e-03 -1.4281e-02
  -1.7044e-02  9.5922e-03  7.0445e-03
   1.0790e-02 -7.2350e-03 -1.1357e-02
 
 ( 0 , 2 ,.,.) = 
  -1.1126e-03  3.0388e-02  2.2247e-02
  -6.1184e-02 -2.3797e-02  2.3747e-03
   4.0678e-02 -1.0356e-01 -6.0011e-02
     ... 
 
 ( 0 ,125,.,.) = 
  -8.5833e-03  1.1438e-02  2.0800e-02
  -1.6565e-02 -3.9587e-02  1.2594e-02
  -1.4314e-03 -5.4257e-03  3.6794e-02
 
 ( 0 ,126,.,.) = 
  -1.3687e-02 -2.9514e-02 -1.4745e-02
   2.8299e-02  2.2096e-02  3.4839e-03
  -4.3521e-03 -2.6706e-03  1.2258e-04
 
 ( 0 ,127,.,.) = 
   7.6403e-03  2.0666e-02  3.7429e-02
   6.9478e-03  4.3983e-02  1.7538e-02
  -9.7797e-03 -2.4789e-02 -1.1349e-03
       ⋮  
 
 ( 1 , 0 ,.,.) = 
   8.4439e-02  8.4827e-02 -5.1478e-02
   3.5253e-02 -1.1375e-03 -1.0331e-01
  -6.4078e-02 -1.2660e-01 -1.2952e-01
 
 ( 1 , 1 ,.,.) = 
   1.0628e-03 -1.4083e-02  4.7109e-03
  -2.1059e-02 -2.8778e-02  9.9708e-03
   1.4074e-02  1.8691e-02  5.8192e-02
 
 ( 1 , 2 ,.,.) = 
   2.2139e-02  8.9027e-03  1.4790e-02
  -1.7497e-02 -5.3924e-03  2.7834e-02
  -1.3855e-02 -1.3346e-02  1.7668e-02
     ... 
 
 ( 1 ,125,.,.) = 
  -3.8032e-02 -2.3097e-02 -7.1775e-03
  -3.5089e-02  1.0861e-02  1.3640e-02
   6.3449e-04  9.7476e-03  7.3670e-03
 
 ( 1 ,126,.,.) = 
  -4.4184e-02 -1.6190e-02  1.2243e-02
  -4.0349e-02 -1.7894e-02  2.8911e-02
  -6.5176e-03 -1.0490e-02  9.1658e-03
 
 ( 1 ,127,.,.) = 
   4.3621e-03  1.3119e-02  1.8442e-03
   1.1555e-02 -1.3031e-02 -9.5657e-03
  -2.3314e-02  1.1609e-03  2.6771e-03
       ⋮  
 
 ( 2 , 0 ,.,.) = 
  -2.1180e-02 -6.2213e-03  1.7609e-03
  -4.7424e-03  1.1101e-02  1.1296e-02
  -1.4529e-02  2.9843e-02  2.4383e-03
 
 ( 2 , 1 ,.,.) = 
   6.9183e-03  9.2937e-03  3.0078e-02
  -4.2612e-03  4.9560e-03 -4.7338e-03
   3.1360e-02  1.9035e-03 -4.7242e-03
 
 ( 2 , 2 ,.,.) = 
  -3.6726e-02  5.7285e-03  1.3919e-01
  -4.2992e-02  9.4023e-04  7.7141e-02
  -5.0050e-02 -4.9479e-03  2.4693e-02
     ... 
 
 ( 2 ,125,.,.) = 
   3.7203e-02  7.4712e-03 -4.2659e-02
  -8.1729e-03 -9.2536e-02 -5.4934e-03
  -2.5927e-02  8.3993e-04  7.4632e-02
 
 ( 2 ,126,.,.) = 
   1.8076e-02  4.5272e-03 -1.3757e-02
  -1.8939e-02 -3.2739e-02 -2.9666e-02
  -2.0608e-02 -4.6167e-03  1.3080e-03
 
 ( 2 ,127,.,.) = 
  -1.2078e-02 -2.0285e-03 -1.6998e-02
  -3.4805e-02 -4.9195e-02 -3.1973e-02
  -2.1021e-02 -5.1164e-03 -4.8522e-03
 ...     
       ⋮  
 
 (125, 0 ,.,.) = 
   3.1791e-02  2.2948e-02  1.0390e-02
  -1.2628e-02 -2.9320e-03  4.2645e-03
  -2.1707e-02 -1.0856e-02  1.6094e-02
 
 (125, 1 ,.,.) = 
  -1.4525e-03 -1.0131e-02 -4.6862e-04
   2.2130e-02  2.2736e-02  5.0183e-03
  -6.0125e-02 -4.3150e-02 -4.4480e-02
 
 (125, 2 ,.,.) = 
   3.0761e-03  3.4396e-03  6.0877e-03
  -1.3683e-02  4.0576e-03 -2.6544e-02
   6.8231e-02  6.3474e-02 -9.3660e-03
     ... 
 
 (125,125,.,.) = 
   1.8752e-02  1.9400e-02  4.1691e-02
   8.7770e-03  8.2394e-04  1.8619e-02
   1.8796e-02  6.2238e-02 -2.3801e-02
 
 (125,126,.,.) = 
  -2.9788e-02 -3.4598e-02 -2.5225e-02
   8.4234e-03 -2.3222e-02 -9.4612e-03
   6.9035e-03  6.9737e-02 -1.3359e-02
 
 (125,127,.,.) = 
   2.6981e-03 -4.3182e-02 -1.6731e-02
   2.5812e-02 -7.2025e-02 -6.5399e-02
   4.6257e-02  2.9469e-02 -1.5811e-02
       ⋮  
 
 (126, 0 ,.,.) = 
  -2.1079e-02  3.8220e-02  8.3305e-03
  -5.9912e-03  3.5584e-02 -1.7534e-03
   1.8735e-02  7.0859e-03 -3.5151e-03
 
 (126, 1 ,.,.) = 
  -4.5937e-02 -7.4695e-02 -5.3608e-02
  -8.6266e-03  9.0894e-03 -3.0345e-02
  -2.8158e-02 -2.1204e-02 -8.4730e-03
 
 (126, 2 ,.,.) = 
  -7.1772e-02 -6.8582e-02  2.5544e-02
   5.0363e-02  2.5269e-02  5.6668e-02
   2.6238e-03  1.3871e-03 -8.4692e-03
     ... 
 
 (126,125,.,.) = 
  -2.9644e-02  1.0896e-02 -3.0402e-02
   1.5095e-03  5.0455e-02  1.5597e-02
  -2.1015e-02 -1.0757e-02 -3.4942e-02
 
 (126,126,.,.) = 
  -2.7573e-02  2.9707e-02 -2.9490e-02
   2.3301e-03 -3.9011e-02  6.8010e-03
   4.4006e-02  3.5397e-02  7.9087e-02
 
 (126,127,.,.) = 
  -2.7480e-02  5.0337e-02  1.4290e-02
  -5.2482e-02 -4.7748e-03  1.2988e-02
  -1.8935e-02 -3.0808e-02 -1.7583e-02
       ⋮  
 
 (127, 0 ,.,.) = 
   3.2280e-02  4.7408e-02  3.4054e-02
   2.1445e-02  3.8987e-03  4.6985e-04
   1.5159e-02  8.2067e-03  3.2426e-02
 
 (127, 1 ,.,.) = 
   9.2653e-03  2.3661e-02  4.2089e-02
   2.1976e-02  4.6128e-02  1.1402e-02
   7.2843e-03  5.2285e-02  8.6340e-03
 
 (127, 2 ,.,.) = 
   1.4022e-02  1.2800e-02  3.5398e-02
  -4.4398e-02  1.7399e-02 -1.5838e-02
   3.1712e-02  5.8679e-02 -9.3244e-03
     ... 
 
 (127,125,.,.) = 
  -4.8399e-03  7.8628e-03 -5.6169e-04
   8.0402e-03  1.7392e-02  7.8734e-03
  -1.7713e-02 -4.5957e-02 -9.8762e-03
 
 (127,126,.,.) = 
  -9.7569e-03 -7.5795e-03 -2.4627e-02
  -8.2454e-03  6.3065e-02 -3.2954e-03
  -7.7549e-03 -1.3404e-04 -8.1337e-03
 
 (127,127,.,.) = 
   1.7664e-02  1.0114e-02  4.2687e-03
  -3.7950e-03  2.6715e-02  2.0121e-02
   1.6868e-02 -6.6515e-03 -1.1107e-02
 [torch.FloatTensor of size 128x128x3x3], Parameter containing:
  0.3323
  0.2908
  0.3246
  0.3435
  0.3011
  0.3054
  0.3041
  0.3539
  0.2862
  0.3601
  0.2970
  0.3381
  0.2565
  0.3276
  0.3030
  0.4085
  0.3519
  0.4218
  0.3055
  0.2551
  0.3425
  0.3215
  0.3366
  0.2700
  0.2849
  0.3954
  0.3166
  0.3286
  0.3515
  0.3953
  0.2768
  0.3625
  0.1988
  0.2717
  0.3355
  0.2797
  0.2510
  0.3832
  0.3266
  0.3263
  0.3681
  0.3401
  0.3651
  0.3391
  0.3071
  0.3231
  0.3691
  0.2410
  0.3536
  0.3189
  0.3238
  0.3611
  0.3086
  0.3309
  0.3886
  0.4362
  0.4550
  0.2962
  0.3071
  0.3386
  0.3317
  0.3228
  0.2393
  0.3147
  0.2738
  0.3218
  0.3198
  0.3411
  0.3611
  0.2833
  0.3035
  0.3183
  0.3146
  0.3890
  0.2607
  0.3479
  0.3236
  0.3709
  0.2592
  0.3742
  0.2555
  0.2966
  0.3505
  0.3165
  0.2808
  0.2660
  0.2817
  0.4795
  0.3372
  0.2723
  0.2955
  0.3225
  0.2470
  0.3160
  0.3515
  0.3131
  0.3372
  0.2837
  0.3540
  0.2897
  0.2490
  0.3019
  0.3114
  0.3510
  0.3022
  0.3617
  0.2859
  0.2831
  0.3243
  0.2769
  0.3314
  0.2394
  0.2932
  0.2788
  0.2686
  0.3194
  0.3542
  0.2683
  0.2955
  0.2924
  0.3538
  0.4256
  0.3603
  0.3013
  0.2763
  0.4354
  0.3991
  0.2694
 [torch.FloatTensor of size 128], Parameter containing:
 -0.1735
 -0.2337
 -0.3383
 -0.0806
 -0.1920
 -0.0621
 -0.1885
 -0.2830
 -0.1680
 -0.1796
 -0.2645
 -0.1983
 -0.1183
 -0.2432
 -0.1706
 -0.3090
 -0.2661
 -0.4040
 -0.1949
 -0.1392
 -0.2449
 -0.1242
 -0.2012
 -0.1901
 -0.1014
 -0.3468
 -0.2245
 -0.3272
 -0.3057
 -0.3289
 -0.1532
 -0.1967
 -0.0667
 -0.3281
 -0.1418
 -0.1527
 -0.0987
 -0.3243
 -0.2252
 -0.3462
 -0.2284
 -0.2263
 -0.1810
 -0.1564
 -0.1730
 -0.1507
 -0.2913
 -0.1643
 -0.1998
 -0.1532
 -0.2211
 -0.2247
 -0.0913
 -0.1563
 -0.2453
 -0.4854
 -0.4428
 -0.1021
 -0.1615
 -0.2125
 -0.2239
 -0.1952
 -0.0447
 -0.1733
 -0.1178
 -0.4775
 -0.2110
 -0.2305
 -0.1795
 -0.1582
 -0.2008
 -0.2041
 -0.1974
 -0.2750
 -0.0395
 -0.2161
 -0.2786
 -0.2626
 -0.0997
 -0.2953
 -0.1431
 -0.1448
 -0.1894
 -0.1283
 -0.1807
 -0.1144
 -0.1308
 -0.4154
 -0.2324
 -0.1376
 -0.1154
 -0.2099
 -0.0966
 -0.1669
 -0.3835
 -0.2545
 -0.1603
 -0.1904
 -0.2420
 -0.1658
 -0.1133
 -0.1498
 -0.1213
 -0.2318
 -0.2017
 -0.3827
 -0.1491
 -0.1174
 -0.1261
 -0.2031
 -0.1832
 -0.2274
 -0.1281
 -0.2557
 -0.1400
 -0.0723
 -0.2212
 -0.1486
 -0.2914
 -0.1116
 -0.2194
 -0.4898
 -0.3693
 -0.1437
 -0.1232
 -0.3723
 -0.6794
 -0.1536
 [torch.FloatTensor of size 128], Parameter containing:
 ( 0 , 0 ,.,.) = 
  -1.6153e-02  5.0134e-03 -9.0186e-04
  -8.8386e-03 -1.9390e-02 -2.4174e-02
   6.3052e-03  1.0245e-02 -1.3816e-02
 
 ( 0 , 1 ,.,.) = 
  -1.0979e-02  2.6164e-03  2.3656e-02
  -1.7687e-02  1.9861e-02  6.4150e-02
   6.0224e-03  7.6342e-02  1.0215e-01
 
 ( 0 , 2 ,.,.) = 
  -8.1113e-03  6.8414e-03  2.5436e-02
  -8.0696e-03  9.2929e-03  8.2899e-03
   7.7306e-03  1.2159e-02  7.1625e-03
     ... 
 
 ( 0 ,125,.,.) = 
   1.5175e-02  6.2196e-03  2.1798e-02
  -1.5199e-02 -8.5439e-02 -2.4713e-02
  -1.8460e-02 -4.9767e-02 -1.6818e-03
 
 ( 0 ,126,.,.) = 
   3.0728e-02  3.9962e-02  3.1253e-02
  -1.8738e-02 -6.7510e-02 -2.7649e-02
   2.8429e-02  3.1854e-02  1.0543e-02
 
 ( 0 ,127,.,.) = 
  -1.8320e-02 -1.5854e-02 -1.0685e-02
  -2.7442e-02 -3.0616e-02 -1.0485e-02
  -1.5122e-02 -1.0595e-02 -2.5322e-02
       ⋮  
 
 ( 1 , 0 ,.,.) = 
   3.6868e-03  3.0996e-02  4.2763e-02
   4.6537e-02  4.8606e-02  2.3800e-03
   1.6654e-02  1.2900e-02 -1.8230e-02
 
 ( 1 , 1 ,.,.) = 
  -1.0441e-02 -1.5934e-03 -1.6128e-02
  -1.2799e-02  4.9570e-03 -1.4585e-02
  -2.3553e-02 -3.7023e-03 -1.4399e-02
 
 ( 1 , 2 ,.,.) = 
   1.0338e-02 -1.7560e-02 -3.3046e-02
  -3.2090e-02 -5.9258e-03  2.0201e-03
  -4.1428e-02  4.9121e-03  1.6906e-02
     ... 
 
 ( 1 ,125,.,.) = 
  -4.9525e-02 -4.6498e-02 -5.9916e-02
  -2.6670e-02 -1.9079e-02 -2.9419e-02
  -3.9683e-03  1.9405e-02  7.3317e-03
 
 ( 1 ,126,.,.) = 
   1.4293e-02  1.5643e-02  5.8117e-04
   5.1493e-03  7.4332e-03 -3.6928e-03
  -1.3522e-02 -8.5536e-03 -2.1259e-03
 
 ( 1 ,127,.,.) = 
  -3.0908e-02 -1.9839e-02 -1.9375e-02
  -1.0368e-02 -2.4294e-02  2.4103e-04
  -1.9275e-02 -2.9707e-02 -1.5623e-02
       ⋮  
 
 ( 2 , 0 ,.,.) = 
  -4.9212e-02 -2.9588e-02  8.8023e-02
   4.7453e-03  4.3564e-02  9.3115e-02
   7.4083e-02  4.2868e-02 -5.1033e-02
 
 ( 2 , 1 ,.,.) = 
   6.6992e-03  2.1676e-02 -5.4254e-04
   1.9286e-02  1.0920e-02 -4.5440e-03
   3.1075e-02 -1.7168e-03 -2.7603e-02
 
 ( 2 , 2 ,.,.) = 
   6.0096e-02 -2.9359e-02 -5.8911e-02
  -1.9133e-02 -8.1624e-02 -2.2553e-02
   1.1597e-02  2.5092e-02  1.2130e-02
     ... 
 
 ( 2 ,125,.,.) = 
   5.4307e-03 -2.3130e-02  9.6233e-03
  -4.3785e-02 -2.6735e-02  2.1993e-02
  -3.5919e-02 -4.1009e-02 -2.1860e-02
 
 ( 2 ,126,.,.) = 
   3.3705e-02  6.2938e-02  4.3502e-02
   1.1111e-03  1.9243e-02 -1.9707e-03
  -1.1493e-02 -5.3445e-02 -9.6676e-03
 
 ( 2 ,127,.,.) = 
  -2.6664e-03 -2.6954e-02 -1.7667e-02
  -8.3382e-03  8.9920e-03  8.1260e-04
  -2.6832e-02 -3.5991e-02 -4.2495e-02
 ...     
       ⋮  
 
 (125, 0 ,.,.) = 
  -1.8876e-03 -2.2728e-02 -4.2991e-03
  -9.2231e-03 -3.4333e-02 -1.3392e-02
  -1.2774e-02 -1.1435e-02  1.5617e-02
 
 (125, 1 ,.,.) = 
   1.0703e-02  1.2792e-02  2.2662e-02
   7.3185e-03 -1.7847e-02  1.0674e-02
  -1.5936e-02 -1.9318e-02  2.1768e-02
 
 (125, 2 ,.,.) = 
  -7.3009e-03  3.0234e-02 -1.1899e-02
  -2.6099e-02  3.7452e-03  3.2776e-02
  -3.3101e-02 -7.1923e-03  1.6559e-02
     ... 
 
 (125,125,.,.) = 
  -3.2818e-02 -1.0021e-01 -4.7012e-02
   2.8293e-03  4.1410e-02 -1.1391e-02
  -1.1152e-02 -5.5861e-03  1.9968e-02
 
 (125,126,.,.) = 
  -2.3932e-02 -3.0687e-02 -1.1756e-03
   1.5311e-03 -3.5002e-02 -2.4414e-02
  -8.7575e-03 -7.7842e-02 -3.8842e-02
 
 (125,127,.,.) = 
   2.6107e-02  1.5406e-02  1.7569e-02
  -1.5130e-02 -4.8687e-03  3.0773e-03
  -1.3470e-02 -9.3201e-03 -4.8982e-03
       ⋮  
 
 (126, 0 ,.,.) = 
  -2.0228e-02 -3.0006e-02 -9.8419e-03
  -3.8676e-02 -3.3481e-02 -7.4265e-03
  -2.8935e-02 -3.2037e-02  2.9245e-03
 
 (126, 1 ,.,.) = 
  -1.2900e-02  3.8046e-03  1.5940e-02
  -2.4030e-02  2.0666e-03  5.7250e-03
   6.9989e-03  1.2192e-02  1.5406e-02
 
 (126, 2 ,.,.) = 
  -1.5018e-02 -9.0988e-03  2.4450e-02
   1.0039e-02  1.2561e-02  2.6997e-02
   2.9556e-02  1.9463e-02 -2.6584e-03
     ... 
 
 (126,125,.,.) = 
  -1.8481e-02  3.9417e-04  9.9768e-03
  -4.5447e-03  1.2307e-02  3.5507e-02
  -1.1873e-03 -2.6185e-03  1.1547e-02
 
 (126,126,.,.) = 
   4.6292e-03 -1.3690e-02 -1.0171e-02
   1.2104e-02  1.6793e-02  1.3003e-02
   1.3328e-03  3.4701e-03  1.7323e-02
 
 (126,127,.,.) = 
  -8.7332e-05  5.8646e-03 -3.5117e-03
   3.8112e-03 -7.1828e-03 -1.1407e-02
   1.9705e-02  2.0556e-02  5.7084e-03
       ⋮  
 
 (127, 0 ,.,.) = 
   3.6998e-02  3.2616e-02 -9.4535e-04
  -2.9484e-02 -2.3441e-02 -2.8085e-02
  -2.5451e-02  3.9048e-02  3.6686e-02
 
 (127, 1 ,.,.) = 
  -1.8732e-02 -1.5352e-02  1.1149e-02
  -2.1324e-03 -2.3177e-02  1.7628e-02
  -4.0012e-03  1.5463e-02  9.2496e-03
 
 (127, 2 ,.,.) = 
  -2.9346e-02  7.7071e-03 -5.6520e-03
  -2.3611e-02 -1.9390e-03  2.0221e-02
   8.0955e-03 -2.3268e-02 -2.8827e-02
     ... 
 
 (127,125,.,.) = 
  -3.3532e-02 -2.9092e-02 -4.0045e-02
   2.6530e-03 -2.0568e-02  1.3075e-02
   1.6061e-02 -5.5725e-02 -4.9167e-02
 
 (127,126,.,.) = 
  -7.9132e-03  2.1466e-02  2.0913e-02
  -1.7259e-02 -2.5851e-02  2.7177e-03
  -4.6532e-02 -2.4846e-02 -1.9911e-02
 
 (127,127,.,.) = 
  -5.0350e-02 -2.5574e-02  1.7763e-02
  -3.4474e-02  5.5247e-03 -2.7754e-02
  -2.0743e-02 -2.2332e-02 -4.3512e-02
 [torch.FloatTensor of size 128x128x3x3], Parameter containing:
  0.1194
  0.1625
  0.3084
  0.2931
  0.2957
  0.5263
  0.4038
  0.2024
  0.3401
  0.1982
  0.2559
  0.2311
  0.1630
  0.2891
  0.2248
  0.2311
  0.2417
  0.2187
  0.1922
  0.3103
  0.2015
  0.4802
  0.2481
  0.3898
  0.3204
  0.4035
  0.2617
  0.1551
  0.2256
  0.2117
  0.2708
  0.3537
  0.2505
  0.1843
  0.2465
  0.6501
  0.3898
  0.4289
  0.1799
  0.1604
  0.1775
  0.3600
  0.2694
  0.1283
  0.1662
  0.1716
  0.1837
  0.1710
  0.4178
  0.3249
  0.1759
  0.4717
  0.4115
  0.1995
  0.2025
  0.1492
  0.2860
  0.1072
  0.3649
  0.1906
  0.5369
  0.2400
  0.4411
  0.1702
  0.1993
  0.2045
  0.1972
  0.4041
  0.3034
  0.6168
  0.2284
  0.3228
  0.4547
  0.4370
  0.1570
  0.4057
  0.5791
  0.2338
  0.1586
  0.3130
  0.2201
  0.3195
  0.1166
  0.2517
  0.2184
  0.0989
  0.3116
  0.2613
  0.3277
  0.1778
  0.2718
  0.4174
  0.5140
  0.2136
  0.1905
  0.2898
  0.2472
  0.1341
  0.6212
  0.1810
  0.2394
  0.1417
  0.1759
  0.2827
  0.1987
  0.3775
  0.3749
  0.1274
  0.3656
  0.4305
  0.4212
  0.2673
  0.2016
  0.5098
  0.1449
  0.4408
  0.3583
  0.2503
  0.5682
  0.2518
  0.1392
  0.0617
  0.3406
  0.1313
  0.4586
  0.2914
  0.1326
  0.3915
 [torch.FloatTensor of size 128], Parameter containing:
 -0.1403
 -0.0889
 -0.4147
 -0.2264
 -0.0737
 -0.3534
 -0.3379
 -0.0752
 -0.1791
  0.0448
 -0.2842
 -0.1765
 -0.1591
 -0.0675
 -0.1543
 -0.1061
 -0.2334
 -0.0981
 -0.0908
 -0.0567
 -0.1908
 -0.2055
 -0.2704
 -0.1883
 -0.3570
 -0.1125
 -0.1632
 -0.0211
 -0.1687
 -0.2124
 -0.1713
 -0.0872
 -0.2194
 -0.1888
 -0.2954
 -0.4570
 -0.0226
 -0.0527
  0.0406
 -0.0609
 -0.0456
 -0.1176
 -0.0145
  0.0318
 -0.2046
 -0.0953
 -0.0496
 -0.1051
 -0.0793
 -0.1933
 -0.1467
 -0.3215
 -0.3257
 -0.2287
 -0.0356
 -0.1869
 -0.1932
 -0.0771
  0.2768
 -0.0656
 -0.0895
 -0.2548
 -0.2365
  0.0021
 -0.0987
 -0.3178
  0.1613
  0.0006
 -0.2347
 -0.4150
 -0.1310
 -0.3142
 -0.2582
 -0.5400
  0.0772
 -0.2546
 -0.4454
 -0.0262
 -0.0937
 -0.2201
 -0.2044
 -0.0155
 -0.0893
 -0.2167
  0.1112
 -0.0619
 -0.1217
 -0.1593
 -0.1317
 -0.1717
 -0.3729
 -0.3354
 -0.3414
  0.0358
 -0.2067
 -0.1087
  0.0141
 -0.0338
 -0.2129
 -0.1122
 -0.1627
 -0.2000
  0.0908
 -0.0041
 -0.1313
 -0.2942
  0.0160
 -0.1065
 -0.1289
 -0.1699
 -0.1721
 -0.1809
 -0.2295
 -0.3611
 -0.1746
 -0.3540
 -0.1554
 -0.2709
 -0.2607
  0.0084
 -0.0311
 -0.0022
 -0.0831
  0.0380
 -0.4893
 -0.2749
  0.1245
 -0.1272
 [torch.FloatTensor of size 128], Parameter containing:
 ( 0 , 0 ,.,.) = 
  -1.5906e-02 -1.6618e-02 -1.5938e-02
  -5.2744e-03  1.5103e-02  9.8805e-03
  -1.4850e-02  3.6254e-04 -1.1378e-02
 
 ( 0 , 1 ,.,.) = 
  -9.4971e-03 -1.8568e-02 -6.0605e-03
   9.7622e-03 -1.2294e-02 -5.2978e-03
   7.0518e-03 -1.6063e-02 -7.1445e-03
 
 ( 0 , 2 ,.,.) = 
  -2.2693e-02 -3.7669e-02 -3.3695e-02
  -3.1569e-02 -5.8022e-02 -3.9105e-02
  -3.4616e-02 -3.8806e-02 -1.5695e-02
     ... 
 
 ( 0 ,125,.,.) = 
   4.8713e-03  7.9539e-03  1.4374e-02
  -1.5242e-03  2.4200e-02  5.6440e-03
  -4.4355e-03  6.2454e-03  6.8561e-03
 
 ( 0 ,126,.,.) = 
   1.6028e-02 -1.2036e-02 -1.3101e-03
   9.5804e-03  5.7272e-03  1.6091e-03
  -9.9173e-03 -1.3593e-02 -6.3679e-03
 
 ( 0 ,127,.,.) = 
   5.3450e-02  4.6441e-02  2.4824e-02
   3.4065e-02 -2.8656e-03 -4.1207e-03
  -1.4000e-02 -4.6092e-03 -1.4152e-02
       ⋮  
 
 ( 1 , 0 ,.,.) = 
  -1.1567e-03 -1.8638e-02 -3.4453e-02
   4.9889e-03 -1.1695e-02 -3.3321e-02
   5.9653e-03 -1.6154e-02 -1.7452e-02
 
 ( 1 , 1 ,.,.) = 
   1.0729e-02  1.3964e-02 -1.9171e-02
   2.8854e-03  1.2573e-02  7.2767e-03
  -1.6815e-02 -1.8740e-02 -1.3784e-03
 
 ( 1 , 2 ,.,.) = 
  -2.1852e-02  6.2900e-03  1.5931e-02
  -3.5272e-03  5.6997e-03  3.1077e-02
   2.3169e-03  3.2389e-03  1.7490e-02
     ... 
 
 ( 1 ,125,.,.) = 
  -1.6246e-02 -7.7688e-03  7.7471e-03
  -1.4870e-03 -1.2226e-02 -9.3389e-03
   8.6164e-04 -2.2071e-03  7.3769e-03
 
 ( 1 ,126,.,.) = 
   2.9310e-03 -2.3592e-02  5.8461e-03
   1.4344e-02 -1.6924e-02 -6.1749e-03
  -7.7191e-03 -3.2305e-02 -3.3688e-02
 
 ( 1 ,127,.,.) = 
   8.6900e-03  1.3976e-02  8.0760e-03
  -3.3662e-03  1.0516e-02  1.4952e-02
   1.8944e-02  3.0948e-02  2.5647e-02
       ⋮  
 
 ( 2 , 0 ,.,.) = 
  -3.5797e-02 -2.2565e-02 -1.4440e-02
  -7.5372e-03 -2.2142e-02  1.1150e-02
  -3.6385e-03 -1.4821e-02 -1.6427e-02
 
 ( 2 , 1 ,.,.) = 
  -1.4620e-02 -3.0657e-02 -2.0434e-02
  -2.8462e-02 -4.5328e-02 -5.7915e-02
   2.8774e-02 -1.5172e-02 -2.4541e-02
 
 ( 2 , 2 ,.,.) = 
   1.7403e-02  1.9920e-02 -4.6249e-03
   1.7813e-02  2.3648e-02  1.3638e-02
   2.9347e-02  4.3449e-02  1.8594e-02
     ... 
 
 ( 2 ,125,.,.) = 
   7.9258e-03 -1.2183e-02 -1.5811e-02
  -1.0720e-02 -3.1404e-02 -7.5279e-03
  -7.0299e-03 -1.7342e-02 -3.0783e-02
 
 ( 2 ,126,.,.) = 
  -1.0258e-02 -1.1796e-02 -1.7141e-02
  -2.6423e-02 -1.5036e-03  2.7959e-02
  -8.9306e-03  5.3510e-03  9.6632e-03
 
 ( 2 ,127,.,.) = 
   1.4481e-02 -3.1531e-02 -1.9707e-02
  -1.4944e-02 -1.7709e-02  7.6966e-03
   1.2465e-02  7.1035e-03 -6.1596e-03
 ...     
       ⋮  
 
 (253, 0 ,.,.) = 
   5.3120e-03  2.5512e-02  7.1053e-03
   1.9666e-02  2.6990e-02  4.2043e-02
   4.1191e-02  2.2283e-02  3.5003e-02
 
 (253, 1 ,.,.) = 
   2.5968e-03  4.0685e-03  1.0626e-02
   4.6474e-03  2.0337e-02  8.0847e-03
   1.4475e-02 -3.0070e-03 -1.9656e-02
 
 (253, 2 ,.,.) = 
  -4.0235e-03  2.5510e-02  2.2875e-03
  -1.5182e-02  2.6031e-02  8.2526e-03
  -2.1065e-03  2.6928e-02  3.2296e-03
     ... 
 
 (253,125,.,.) = 
   5.5063e-03 -4.8631e-03  1.8346e-02
   8.5499e-03  2.3002e-03  7.7201e-03
   8.2280e-03  9.5818e-03  2.1510e-02
 
 (253,126,.,.) = 
  -1.7702e-02  9.9203e-03 -1.2934e-02
  -1.2670e-02  9.5506e-03 -1.2438e-02
   8.9810e-03  4.9343e-02  3.6238e-02
 
 (253,127,.,.) = 
   1.2333e-02  1.8408e-02 -1.7794e-02
   5.7676e-03 -5.7844e-03 -1.1706e-02
   3.4462e-03 -1.0299e-02 -4.2529e-02
       ⋮  
 
 (254, 0 ,.,.) = 
   3.1634e-02  7.6514e-02  4.4300e-02
   9.3963e-02  1.4798e-01  1.5104e-01
   6.6483e-02  1.3856e-01  1.1323e-01
 
 (254, 1 ,.,.) = 
  -2.8205e-02 -4.0731e-03 -1.9967e-02
  -1.9283e-02 -1.2330e-03  1.0728e-02
  -1.6487e-02 -2.7540e-03  7.7751e-04
 
 (254, 2 ,.,.) = 
  -1.2156e-02 -3.2183e-02 -1.5299e-02
  -9.1752e-04 -1.2350e-02 -3.8531e-03
  -1.9342e-02 -1.0735e-02 -2.1051e-02
     ... 
 
 (254,125,.,.) = 
  -3.0457e-03  6.5687e-03 -3.2163e-04
   1.4628e-02 -1.6662e-02  1.4216e-02
   2.2738e-02  1.2016e-02  7.1802e-03
 
 (254,126,.,.) = 
   3.9151e-03 -1.9739e-02  1.1058e-02
  -2.5105e-02 -3.8439e-02 -4.4722e-02
  -3.5862e-02 -9.8120e-02 -6.8447e-02
 
 (254,127,.,.) = 
  -8.4853e-03  2.2905e-03  3.0757e-03
   3.8484e-03  1.8156e-02  6.9025e-03
   8.9456e-03  8.0009e-03  1.2579e-02
       ⋮  
 
 (255, 0 ,.,.) = 
  -1.3006e-02 -9.0262e-03  1.0574e-03
  -2.5979e-02 -1.9484e-02 -9.3637e-03
   4.8438e-03  2.3742e-03  1.0574e-02
 
 (255, 1 ,.,.) = 
  -2.4782e-03 -1.4049e-02 -2.8621e-02
  -2.3822e-03  1.1463e-03 -2.3321e-02
   1.2275e-02  8.3306e-04  1.4305e-03
 
 (255, 2 ,.,.) = 
  -4.8958e-02 -4.3860e-02 -5.7901e-02
  -3.5920e-02 -3.6503e-02 -3.8574e-02
  -4.1023e-02 -3.3337e-02 -1.3673e-02
     ... 
 
 (255,125,.,.) = 
  -1.1772e-02 -8.1042e-03 -1.5803e-02
  -2.7190e-02 -2.8550e-02  7.5042e-03
  -2.4363e-02  1.3943e-02  6.0615e-03
 
 (255,126,.,.) = 
  -2.7317e-02  1.9704e-02  2.2183e-02
  -3.7557e-02  2.0815e-02  1.8682e-02
  -4.4557e-02 -4.3529e-03 -1.6779e-02
 
 (255,127,.,.) = 
   1.9939e-02  2.6802e-02  1.1996e-02
   2.0260e-02  2.1540e-02  2.5003e-03
   1.8079e-04 -7.6315e-03 -1.9582e-02
 [torch.FloatTensor of size 256x128x3x3], Parameter containing:
  0.2856
  0.2425
  0.3032
  0.3168
  0.3011
  0.3475
  0.3076
  0.3105
  0.3646
  0.3255
  0.2195
  0.3167
  0.2674
  0.3104
  0.3026
  0.3443
  0.2915
  0.3379
  0.2887
  0.2996
  0.3588
  0.3164
  0.2882
  0.2917
  0.3492
  0.3749
  0.3587
  0.3166
  0.2756
  0.2978
  0.3364
  0.2893
  0.3106
  0.2506
  0.3460
  0.3621
  0.2570
  0.3695
  0.2935
  0.3286
  0.3243
  0.3188
  0.3093
  0.3314
  0.3550
  0.2978
  0.2737
  0.3023
  0.3179
  0.2831
  0.3065
  0.3390
  0.3053
  0.3099
  0.3017
  0.3472
  0.3034
  0.2935
  0.3352
  0.3676
  0.3163
  0.3404
  0.3078
  0.2819
  0.3794
  0.3083
  0.2778
  0.3363
  0.2284
  0.3259
  0.2790
  0.3072
  0.2975
  0.3847
  0.3372
  0.2253
  0.2827
  0.3737
  0.2796
  0.3485
  0.3879
  0.3288
  0.3340
  0.3335
  0.2756
  0.3500
  0.2897
  0.2798
  0.2907
  0.3220
  0.3824
  0.3522
  0.3278
  0.3689
  0.3147
  0.3600
  0.3123
  0.2519
  0.2355
  0.3211
  0.3203
  0.3345
  0.2768
  0.3341
  0.3153
  0.3175
  0.2224
  0.2956
  0.3206
  0.2658
  0.3662
  0.2715
  0.3655
  0.3427
  0.2820
  0.2754
  0.4669
  0.3090
  0.3468
  0.3144
  0.3220
  0.2765
  0.3301
  0.3219
  0.3152
  0.2813
  0.2497
  0.3514
  0.3264
  0.3014
  0.2734
  0.3522
  0.3831
  0.3028
  0.2940
  0.2825
  0.3099
  0.2373
  0.2705
  0.4189
  0.2985
  0.3841
  0.2754
  0.3091
  0.3169
  0.2824
  0.2749
  0.3493
  0.4018
  0.3108
  0.2176
  0.2821
  0.3199
  0.3358
  0.2468
  0.3332
  0.2876
  0.2964
  0.2385
  0.3451
  0.3081
  0.2760
  0.2533
  0.2576
  0.3092
  0.2950
  0.3089
  0.3113
  0.3475
  0.3172
  0.2474
  0.3371
  0.3450
  0.3189
  0.3150
  0.3008
  0.2694
  0.3730
  0.3235
  0.2988
  0.2812
  0.3245
  0.3630
  0.2843
  0.3533
  0.3451
  0.3244
  0.3524
  0.3118
  0.3429
  0.3215
  0.2748
  0.3287
  0.3656
  0.2901
  0.2523
  0.3284
  0.2523
  0.3426
  0.2851
  0.2918
  0.2497
  0.5159
  0.3026
  0.2743
  0.2379
  0.3524
  0.3394
  0.2264
  0.2652
  0.3759
  0.3777
  0.2459
  0.3046
  0.3067
  0.3775
  0.2976
  0.3552
  0.2696
  0.2649
  0.2872
  0.2985
  0.2867
  0.3676
  0.3494
  0.3823
  0.3246
  0.3567
  0.2662
  0.3357
  0.2935
  0.2987
  0.2664
  0.3019
  0.3175
  0.2436
  0.3274
  0.2764
  0.2466
  0.2876
  0.3060
  0.3157
  0.3329
  0.2984
  0.2961
  0.3309
  0.3729
  0.3238
  0.3491
  0.3342
  0.3037
  0.3578
  0.2849
  0.2827
  0.2809
  0.3249
 [torch.FloatTensor of size 256], Parameter containing:
 -0.0915
  0.0189
 -0.1235
 -0.0613
 -0.1003
 -0.1306
 -0.1473
 -0.1079
 -0.2438
 -0.1113
  0.1361
 -0.1477
  0.0387
 -0.0907
  0.0352
 -0.1851
 -0.1319
 -0.1746
 -0.0815
 -0.1004
 -0.3394
 -0.1712
 -0.0807
 -0.1228
 -0.2263
 -0.1503
 -0.2314
 -0.2327
 -0.0854
 -0.0802
 -0.0716
 -0.0839
 -0.0592
  0.0358
 -0.0322
 -0.2197
  0.0027
 -0.1471
 -0.0264
 -0.1886
 -0.2417
 -0.1494
 -0.1904
 -0.1089
 -0.2657
 -0.1362
 -0.0487
 -0.1340
 -0.0930
 -0.0064
 -0.1721
 -0.1476
 -0.1714
  0.0336
 -0.1011
 -0.1761
 -0.1184
 -0.0482
 -0.3260
 -0.1555
 -0.0169
 -0.2373
 -0.1015
 -0.1051
 -0.2738
 -0.1917
 -0.0503
 -0.1098
  0.1484
 -0.2282
 -0.0700
 -0.1427
 -0.1417
 -0.3096
 -0.2043
  0.0269
 -0.0779
 -0.0842
 -0.0464
 -0.1429
 -0.3917
  0.0257
 -0.1779
 -0.0993
 -0.0507
 -0.2222
 -0.0951
 -0.0861
 -0.0743
 -0.1666
 -0.2054
 -0.1782
 -0.1150
 -0.2525
 -0.0694
 -0.0536
 -0.0499
 -0.0311
  0.1212
 -0.0988
 -0.1570
 -0.3093
 -0.0797
 -0.0994
 -0.1774
 -0.0505
  0.0766
 -0.0480
 -0.1278
 -0.0651
 -0.1737
  0.0303
 -0.1334
 -0.2435
 -0.0746
 -0.0365
 -0.1843
 -0.0887
 -0.1924
 -0.1110
 -0.1458
 -0.0895
 -0.0956
 -0.2042
 -0.1338
 -0.0637
 -0.0699
 -0.1656
 -0.1521
 -0.1317
 -0.0826
 -0.2470
 -0.1174
 -0.1475
 -0.0840
 -0.0681
 -0.1789
  0.0288
 -0.0362
 -0.3005
 -0.1441
 -0.0812
 -0.0492
 -0.0657
 -0.1249
 -0.1104
  0.0187
 -0.1351
 -0.1944
 -0.0909
  0.2067
 -0.1081
 -0.2499
 -0.0999
  0.0507
 -0.1899
 -0.0369
 -0.1432
  0.1279
 -0.1782
 -0.1172
 -0.0099
  0.0785
 -0.0681
 -0.0365
 -0.1596
 -0.1606
 -0.0922
 -0.1773
 -0.1788
  0.0306
 -0.1101
 -0.1355
 -0.2244
 -0.0860
 -0.1232
 -0.0927
 -0.1666
 -0.1393
 -0.0898
 -0.0614
 -0.1740
 -0.2503
 -0.0593
 -0.1272
 -0.1422
 -0.0743
 -0.2208
 -0.2207
 -0.2742
 -0.1302
 -0.0916
 -0.1696
 -0.2481
 -0.1524
  0.0410
 -0.1077
  0.0408
 -0.1915
 -0.0697
 -0.1049
 -0.0110
 -0.3257
 -0.1336
 -0.1021
  0.0128
 -0.2717
 -0.1245
  0.0288
 -0.1025
 -0.2405
 -0.1476
  0.1008
 -0.0220
 -0.0983
 -0.4417
 -0.0774
 -0.3207
 -0.0272
 -0.0726
 -0.0608
 -0.0430
 -0.0872
 -0.1280
 -0.1608
 -0.1529
 -0.1745
 -0.1702
 -0.0486
 -0.1459
 -0.0552
 -0.0808
 -0.0264
 -0.0952
 -0.1126
 -0.0452
 -0.0837
 -0.0331
  0.0127
 -0.0865
 -0.1446
 -0.0732
 -0.2160
 -0.0952
 -0.1297
 -0.2008
 -0.2135
 -0.2204
 -0.2381
 -0.1787
 -0.1386
 -0.1901
 -0.0981
 -0.0850
 -0.0761
 -0.0586
 [torch.FloatTensor of size 256], Parameter containing:
 ( 0 , 0 ,.,.) = 
  -9.2775e-03 -3.3897e-02 -1.1927e-02
  -2.4595e-02 -7.9761e-02 -4.8709e-02
  -4.3490e-02 -8.0118e-02 -6.5252e-02
 
 ( 0 , 1 ,.,.) = 
  -2.8918e-02  2.3763e-04 -2.8561e-02
   9.8557e-03  1.0253e-02 -1.7677e-02
  -1.0684e-02  2.8071e-03 -1.2483e-02
 
 ( 0 , 2 ,.,.) = 
  -1.4730e-02  2.2622e-02  4.4314e-03
   1.5512e-02  1.0901e-02 -4.0294e-03
  -2.0756e-02 -1.8048e-02 -1.7258e-02
     ... 
 
 ( 0 ,253,.,.) = 
   3.1821e-04 -4.0924e-03 -7.9885e-04
  -2.1664e-02 -2.2339e-02 -2.9870e-02
   1.0453e-02  3.4707e-03 -1.1426e-02
 
 ( 0 ,254,.,.) = 
   9.6516e-03  1.8361e-02  3.7035e-02
   3.7147e-03  1.0427e-02  1.5162e-02
   8.4325e-03  1.8343e-02  3.0159e-02
 
 ( 0 ,255,.,.) = 
   1.3859e-03  8.4181e-03  9.7185e-03
   2.6455e-02  4.1474e-02  5.5292e-02
   1.6905e-02  6.1027e-02  5.6296e-02
       ⋮  
 
 ( 1 , 0 ,.,.) = 
   1.1743e-02  1.6508e-02  5.1232e-03
   2.9441e-02  2.0441e-02  2.1624e-02
   7.8852e-03  1.3290e-02  1.1664e-02
 
 ( 1 , 1 ,.,.) = 
  -1.5315e-02 -2.1319e-02 -8.9703e-03
  -2.9171e-02 -5.1600e-02 -4.3605e-02
  -4.5486e-03 -3.7239e-02 -4.2013e-02
 
 ( 1 , 2 ,.,.) = 
   3.0217e-04  3.9781e-02 -1.4889e-04
   1.2860e-02  3.3156e-02  1.6254e-02
  -9.5886e-03 -5.6529e-03 -1.6966e-02
     ... 
 
 ( 1 ,253,.,.) = 
   4.4662e-02  8.1982e-03  1.6867e-02
  -6.6190e-03 -3.7080e-02 -5.9346e-03
  -2.3913e-02 -6.0699e-02 -2.8947e-02
 
 ( 1 ,254,.,.) = 
  -5.7020e-03 -4.2262e-02 -2.1947e-02
  -2.2780e-02 -3.1428e-02 -5.8322e-02
  -1.9598e-02 -5.2995e-02 -4.8502e-02
 
 ( 1 ,255,.,.) = 
   6.4948e-03  3.2666e-03  9.3442e-03
   1.0466e-03 -4.9306e-03 -1.1003e-02
  -1.5981e-02 -1.0119e-02 -1.4555e-02
       ⋮  
 
 ( 2 , 0 ,.,.) = 
  -6.1149e-03 -6.6849e-03 -6.9256e-03
  -5.1692e-03 -8.9064e-03 -1.4313e-02
  -1.1450e-02 -1.7125e-02 -2.3729e-02
 
 ( 2 , 1 ,.,.) = 
   3.9899e-02  1.6684e-02  2.0991e-02
   1.6498e-02 -2.6236e-02 -1.1630e-02
   5.9030e-03 -2.0597e-02 -1.5280e-02
 
 ( 2 , 2 ,.,.) = 
   6.0228e-03  2.4200e-02  2.0716e-02
   4.9551e-03 -6.1590e-03  1.4790e-02
   9.8595e-03 -2.7931e-02 -5.4261e-03
     ... 
 
 ( 2 ,253,.,.) = 
  -9.7426e-03 -1.6989e-03 -1.0106e-02
  -6.1351e-04  4.3355e-02  3.8143e-02
   3.7943e-03  4.4980e-02  3.9165e-02
 
 ( 2 ,254,.,.) = 
   1.3395e-02  6.9187e-03  1.9631e-02
   6.7533e-03  2.5027e-02  1.5162e-02
   1.7857e-03 -4.3971e-03  3.7016e-03
 
 ( 2 ,255,.,.) = 
  -1.7440e-02 -1.6260e-02 -2.4000e-02
  -1.9716e-02 -1.7364e-02 -1.7828e-02
  -3.0010e-02 -1.3697e-02 -2.1068e-02
 ...     
       ⋮  
 
 (253, 0 ,.,.) = 
   6.5624e-03  6.0837e-03  2.8446e-02
  -1.2967e-02 -5.0910e-02 -2.0435e-02
  -1.5419e-02 -1.4899e-02 -1.8056e-02
 
 (253, 1 ,.,.) = 
   1.2341e-02  3.2479e-02  2.2650e-02
  -4.2432e-03 -1.8113e-02  2.2224e-03
   2.9012e-03 -1.7405e-02  3.1869e-03
 
 (253, 2 ,.,.) = 
  -1.0992e-02  1.1080e-02 -1.4198e-02
   8.2258e-03  3.0135e-02  4.1601e-02
   6.0791e-04  1.6776e-04  2.1328e-02
     ... 
 
 (253,253,.,.) = 
  -7.5068e-04  2.6565e-02  1.1820e-02
  -1.5916e-02 -7.4243e-03 -5.1214e-03
   4.1732e-03 -6.8548e-03 -7.3191e-03
 
 (253,254,.,.) = 
  -6.9767e-03  9.7686e-04  1.8935e-03
   6.0631e-03  5.0983e-02 -3.4937e-03
  -8.1496e-03 -3.0339e-02 -1.7409e-02
 
 (253,255,.,.) = 
  -1.0048e-02  3.2093e-04 -1.1435e-03
  -1.5435e-03 -2.9689e-02 -1.9539e-02
  -9.6000e-04  4.8948e-03  1.5117e-02
       ⋮  
 
 (254, 0 ,.,.) = 
   1.6080e-02  1.2594e-02  5.4767e-03
  -1.3241e-02 -1.9564e-02 -2.0807e-02
  -7.7261e-03 -2.3040e-02 -2.0197e-02
 
 (254, 1 ,.,.) = 
  -1.8947e-03  5.3025e-02  1.3421e-02
   2.7344e-03  2.4908e-02  1.6726e-02
  -1.9196e-02 -1.8768e-02 -1.9954e-02
 
 (254, 2 ,.,.) = 
   8.0703e-03  2.9987e-02  5.7642e-04
   3.5938e-03  2.5408e-02 -1.0444e-02
  -9.6803e-04 -1.9317e-02 -1.2085e-02
     ... 
 
 (254,253,.,.) = 
   1.6295e-02  2.5060e-02  2.8950e-02
  -7.3188e-03 -1.4100e-03  1.2378e-02
  -2.1144e-02 -3.4673e-02 -1.9507e-02
 
 (254,254,.,.) = 
   1.6469e-02  5.1930e-02  4.9364e-02
   5.8284e-03  1.9868e-02  3.6292e-02
  -4.9320e-03 -1.6470e-02 -1.2967e-02
 
 (254,255,.,.) = 
  -1.0214e-02 -3.0802e-02 -3.4004e-02
   5.5274e-03 -1.0925e-02  4.6995e-04
   3.8212e-02  2.0936e-02  3.2566e-02
       ⋮  
 
 (255, 0 ,.,.) = 
   1.8364e-02 -3.0699e-03  1.0348e-02
  -7.2351e-03 -1.2742e-03 -6.9527e-03
   2.1686e-02  1.1490e-03 -3.2707e-03
 
 (255, 1 ,.,.) = 
  -1.6594e-02  1.5176e-04 -9.1776e-03
   1.5036e-02  5.8408e-02  2.1840e-02
  -1.3606e-02  1.8126e-02  1.6354e-02
 
 (255, 2 ,.,.) = 
   2.1872e-02  3.1581e-02  1.8289e-02
  -2.1028e-03 -1.5633e-02  2.0265e-02
   5.2924e-03  4.8438e-04  1.5701e-02
     ... 
 
 (255,253,.,.) = 
   4.4712e-03 -2.4757e-03  1.7267e-03
  -5.2339e-03 -8.8001e-03  1.3738e-02
  -1.0695e-02  1.0347e-03  1.6962e-02
 
 (255,254,.,.) = 
  -5.9934e-03 -3.6803e-02  3.0996e-03
   1.0224e-02  2.9117e-02 -7.3036e-04
   9.9051e-03  5.9974e-02  2.7242e-02
 
 (255,255,.,.) = 
  -9.1759e-03 -1.8297e-02  6.2411e-03
  -3.1871e-02 -2.9350e-02 -1.4883e-02
  -1.4808e-02 -1.2348e-02 -2.3609e-02
 [torch.FloatTensor of size 256x256x3x3], Parameter containing:
  0.3212
  0.2124
  0.2661
  0.3594
  0.2785
  0.2582
  0.3108
  0.3096
  0.3348
  0.2992
  0.2545
  0.2458
  0.3133
  0.4159
  0.2997
  0.3070
  0.3135
  0.4418
  0.3743
  0.2570
  0.2943
  0.3078
  0.2738
  0.3948
  0.2928
  0.3572
  0.3435
  0.5379
  0.4243
  0.3908
  0.2745
  0.2798
  0.3217
  0.1956
  0.2751
  0.3187
  0.3507
  0.2751
  0.1919
  0.3307
  0.2850
  0.3038
  0.2179
  0.2652
  0.2944
  0.2138
  0.2184
  0.2948
  0.3262
  0.3759
  0.2557
  0.3796
  0.2950
  0.3386
  0.3243
  0.3070
  0.3331
  0.2302
  0.3036
  0.3377
  0.2922
  0.2204
  0.3267
  0.3198
  0.4023
  0.2987
  0.4860
  0.2854
  0.2716
  0.4341
  0.2834
  0.2296
  0.2507
  0.3120
  0.3673
  0.3244
  0.3380
  0.3272
  0.2868
  0.2877
  0.3210
  0.2332
  0.3379
  0.2767
  0.2942
  0.2672
  0.4401
  0.2908
  0.3771
  0.2789
  0.3056
  0.3276
  0.3871
  0.2453
  0.2559
  0.2783
  0.3168
  0.3410
  0.2318
  0.3577
  0.5036
  0.3557
  0.2475
  0.1852
  0.2273
  0.3602
  0.2919
  0.3928
  0.4423
  0.2052
  0.2524
  0.2189
  0.4113
  0.3611
  0.4284
  0.2333
  0.3504
  0.7001
  0.3754
  0.2874
  0.3702
  0.3174
  0.3640
  0.2889
  0.4155
  0.2479
  0.2898
  0.3740
  0.4926
  0.2808
  0.2388
  0.3473
  0.1868
  0.2837
  0.3090
  0.3614
  0.2797
  0.6871
  0.2854
  0.2937
  0.3128
  0.4863
  0.2193
  0.2871
  0.2554
  0.4175
  0.3044
  0.3230
  0.3343
  0.4947
  0.3924
  0.2264
  0.2657
  0.4193
  0.3483
  0.3551
  0.2877
  0.2559
  0.2459
  0.2775
  0.3842
  0.2949
  0.3510
  0.1926
  0.3101
  0.3417
  0.3931
  0.3918
  0.3239
  0.2851
  0.4583
  0.2669
  0.2663
  0.4433
  0.3221
  0.3655
  0.3336
  0.4393
  0.3970
  0.3727
  0.3523
  0.3586
  0.3286
  0.4181
  0.2955
  0.3050
  0.2988
  0.4320
  0.2309
  0.3826
  0.2270
  0.2228
  0.3206
  0.3273
  0.2627
  0.3087
  0.2920
  0.2328
  0.4144
  0.4075
  0.3264
  0.3583
  0.3014
  0.3150
  0.4438
  0.4042
  0.2028
  0.3855
  0.2570
  0.2361
  0.2343
  0.3312
  0.2303
  0.3744
  0.4727
  0.3601
  0.2754
  0.1987
  0.3027
  0.3427
  0.2994
  0.2533
  0.2639
  0.3460
  0.3847
  0.4368
  0.3786
  0.3123
  0.2591
  0.3979
  0.2577
  0.3131
  0.2934
  0.3027
  0.2942
  0.2266
  0.2806
  0.2977
  0.1858
  0.2788
  0.2504
  0.3948
  0.3496
  0.2429
  0.2155
  0.2683
  0.4100
  0.3495
  0.4243
  0.2627
  0.3329
  0.2849
  0.3924
  0.3728
  0.2655
  0.3338
 [torch.FloatTensor of size 256], Parameter containing:
 -0.0264
  0.0995
 -0.0068
 -0.0877
  0.0078
  0.0407
 -0.0307
  0.0060
  0.0017
  0.0478
  0.0630
  0.0358
 -0.0504
  0.0214
 -0.0090
 -0.0337
 -0.0455
 -0.1924
 -0.0676
  0.0775
 -0.0340
 -0.0799
  0.1314
 -0.1273
 -0.0628
 -0.0055
 -0.0915
 -0.1757
 -0.0083
 -0.0945
  0.0025
 -0.0319
 -0.0158
  0.1437
 -0.0035
  0.0108
 -0.0511
  0.0358
  0.0878
 -0.0452
 -0.0458
  0.0147
  0.0687
  0.0168
 -0.0477
  0.0568
  0.0460
 -0.0507
  0.0059
 -0.1034
  0.0103
 -0.1052
 -0.0166
 -0.0192
 -0.0345
  0.0201
 -0.1362
  0.0396
 -0.0088
 -0.0108
 -0.0298
  0.0721
 -0.0669
 -0.0094
 -0.0310
 -0.0267
 -0.1418
  0.1190
  0.0669
 -0.2137
  0.0427
  0.0478
  0.0339
  0.0001
 -0.1482
 -0.0237
 -0.0743
 -0.0684
 -0.0201
  0.0147
 -0.0396
  0.0194
 -0.0696
 -0.0558
  0.0080
  0.0236
 -0.2578
  0.0064
 -0.1004
  0.0280
  0.0152
 -0.0484
 -0.1536
  0.1049
  0.0499
  0.0657
 -0.0541
  0.0077
  0.0941
 -0.0200
 -0.2356
 -0.0623
  0.0334
  0.1102
  0.0770
 -0.0325
  0.0481
 -0.1499
 -0.1650
  0.1230
  0.0712
  0.0589
 -0.0482
 -0.0972
 -0.1860
  0.0853
 -0.0516
 -0.3080
 -0.0604
 -0.0771
 -0.2728
  0.0289
 -0.1328
  0.0173
 -0.0392
  0.0542
 -0.0372
 -0.1528
 -0.1766
  0.0839
  0.0693
 -0.0826
  0.1118
 -0.0508
 -0.0448
 -0.0375
  0.0304
 -0.3782
  0.0149
  0.0068
 -0.0521
 -0.2950
  0.0899
  0.0296
  0.0199
 -0.0835
 -0.0964
 -0.0238
  0.0349
 -0.2663
 -0.1618
  0.0736
  0.0276
 -0.1109
 -0.0103
 -0.0975
  0.0140
  0.0108
  0.0784
  0.0131
 -0.0395
  0.0248
 -0.0774
 -0.0284
  0.0104
 -0.0423
 -0.1663
 -0.0949
 -0.0343
  0.0455
 -0.3000
 -0.0069
  0.0141
 -0.2615
 -0.0736
 -0.1063
 -0.0105
 -0.0712
 -0.1034
 -0.0298
 -0.1428
 -0.0517
 -0.0571
 -0.0544
 -0.0423
 -0.0085
  0.0159
 -0.0654
 -0.0613
 -0.1450
  0.0399
  0.0816
 -0.0078
 -0.0341
  0.0320
 -0.0448
 -0.0703
  0.1021
 -0.1799
 -0.2117
 -0.0598
 -0.1160
  0.0393
 -0.0454
 -0.1845
 -0.1085
  0.0558
 -0.0636
  0.0168
  0.0002
  0.0799
 -0.0672
  0.0798
 -0.0040
 -0.1902
  0.0200
  0.0732
  0.1032
 -0.0264
  0.0240
 -0.0442
  0.0229
  0.0234
 -0.0235
  0.0105
 -0.2149
 -0.1281
 -0.0183
 -0.0006
 -0.0516
  0.0566
 -0.0543
  0.0141
 -0.0499
  0.0673
  0.0517
 -0.0040
  0.0351
  0.0828
  0.0100
  0.0592
 -0.2043
 -0.0762
  0.0414
  0.0775
  0.0760
 -0.1592
 -0.0836
 -0.1663
  0.0023
 -0.0685
  0.0381
 -0.0987
 -0.0203
  0.0154
 -0.1055
 [torch.FloatTensor of size 256], Parameter containing:
 ( 0 , 0 ,.,.) = 
   8.0862e-03
 
 ( 0 , 1 ,.,.) = 
  -1.9208e-02
 
 ( 0 , 2 ,.,.) = 
  -1.7272e-02
     ... 
 
 ( 0 ,125,.,.) = 
  -1.2758e-02
 
 ( 0 ,126,.,.) = 
   2.5496e-03
 
 ( 0 ,127,.,.) = 
   5.3547e-03
       ⋮  
 
 ( 1 , 0 ,.,.) = 
  -1.4284e-02
 
 ( 1 , 1 ,.,.) = 
  -5.5428e-02
 
 ( 1 , 2 ,.,.) = 
  -3.4568e-02
     ... 
 
 ( 1 ,125,.,.) = 
   2.7476e-02
 
 ( 1 ,126,.,.) = 
   3.5964e-02
 
 ( 1 ,127,.,.) = 
   2.3994e-02
       ⋮  
 
 ( 2 , 0 ,.,.) = 
   7.6148e-03
 
 ( 2 , 1 ,.,.) = 
   2.0725e-02
 
 ( 2 , 2 ,.,.) = 
  -1.0066e-02
     ... 
 
 ( 2 ,125,.,.) = 
  -2.7756e-02
 
 ( 2 ,126,.,.) = 
   6.3956e-03
 
 ( 2 ,127,.,.) = 
  -2.2016e-03
 ...     
       ⋮  
 
 (253, 0 ,.,.) = 
   3.3605e-02
 
 (253, 1 ,.,.) = 
  -4.2383e-02
 
 (253, 2 ,.,.) = 
   2.2568e-02
     ... 
 
 (253,125,.,.) = 
  -3.3004e-02
 
 (253,126,.,.) = 
  -9.1010e-04
 
 (253,127,.,.) = 
  -1.7735e-02
       ⋮  
 
 (254, 0 ,.,.) = 
  -1.1416e-02
 
 (254, 1 ,.,.) = 
  -1.8309e-02
 
 (254, 2 ,.,.) = 
   7.6073e-03
     ... 
 
 (254,125,.,.) = 
   1.5128e-02
 
 (254,126,.,.) = 
   3.3239e-02
 
 (254,127,.,.) = 
   2.0724e-04
       ⋮  
 
 (255, 0 ,.,.) = 
   6.2636e-03
 
 (255, 1 ,.,.) = 
  -2.0036e-02
 
 (255, 2 ,.,.) = 
   1.0343e-03
     ... 
 
 (255,125,.,.) = 
  -1.9124e-02
 
 (255,126,.,.) = 
   4.5483e-02
 
 (255,127,.,.) = 
   7.8252e-03
 [torch.FloatTensor of size 256x128x1x1], Parameter containing:
  0.0674
  0.0514
  0.0385
  0.1692
  0.0604
  0.0460
  0.1209
  0.1110
  0.0418
  0.0387
  0.0442
  0.0707
  0.0790
  0.1094
  0.0959
  0.0544
  0.1032
  0.2190
  0.0459
  0.0372
  0.1410
  0.0587
  0.0360
  0.0955
  0.1657
  0.1024
  0.1417
  0.0580
  0.0536
  0.0716
  0.0865
  0.1110
  0.0511
  0.0515
  0.0809
  0.1154
  0.0777
  0.0449
  0.0490
  0.1056
  0.1457
  0.0744
  0.0530
  0.0600
  0.1026
  0.0486
  0.0408
  0.1312
  0.0639
  0.1062
  0.0915
  0.1476
  0.0900
  0.0742
  0.1069
  0.0776
  0.1423
  0.0495
  0.0974
  0.0661
  0.1292
  0.0548
  0.1145
  0.0950
  0.0921
  0.1579
  0.0496
  0.0236
  0.0398
  0.0935
  0.0291
  0.0653
  0.0885
  0.1190
  0.1692
  0.0692
  0.1316
  0.0606
  0.0480
  0.0654
  0.1082
  0.0624
  0.1103
  0.1106
  0.1076
  0.0400
  0.0723
  0.0947
  0.0662
  0.0464
  0.0444
  0.1727
  0.0921
  0.0345
  0.0451
  0.0374
  0.0940
  0.0818
  0.0397
  0.0452
  0.0985
  0.1095
  0.1072
  0.0506
  0.0444
  0.0755
  0.0420
  0.1046
  0.1172
  0.0447
  0.0459
  0.0409
  0.0539
  0.1036
  0.0741
  0.0311
  0.1086
  0.1746
  0.0777
  0.0689
  0.1100
  0.0489
  0.1048
  0.1097
  0.1025
  0.0448
  0.0675
  0.0707
  0.1364
  0.0438
  0.0346
  0.1769
  0.0667
  0.1155
  0.0628
  0.0873
  0.0406
  0.2890
  0.0703
  0.0428
  0.1173
  0.1049
  0.0611
  0.0469
  0.0400
  0.0744
  0.1003
  0.1012
  0.0599
  0.1078
  0.1512
  0.0322
  0.0430
  0.0977
  0.0951
  0.0838
  0.0958
  0.0448
  0.0263
  0.0425
  0.1154
  0.0771
  0.1781
  0.0300
  0.0699
  0.0724
  0.1600
  0.0893
  0.1130
  0.0534
  0.1359
  0.0375
  0.0809
  0.1145
  0.1232
  0.0942
  0.0880
  0.0346
  0.0996
  0.0461
  0.0694
  0.0630
  0.1590
  0.0509
  0.1254
  0.0590
  0.0744
  0.1084
  0.0514
  0.0931
  0.0848
  0.0240
  0.0279
  0.0993
  0.0612
  0.0599
  0.1095
  0.0508
  0.0658
  0.1162
  0.0833
  0.1651
  0.0505
  0.1231
  0.1228
  0.1038
  0.0369
  0.0756
  0.0415
  0.1192
  0.0292
  0.0839
  0.0577
  0.0951
  0.0944
  0.0309
  0.0390
  0.0604
  0.0672
  0.0501
  0.0383
  0.0946
  0.0958
  0.0501
  0.0243
  0.1074
  0.1908
  0.0693
  0.1376
  0.1151
  0.0329
  0.0647
  0.0616
  0.1106
  0.0358
  0.0721
  0.0851
  0.0375
  0.0368
  0.0947
  0.0464
  0.1666
  0.1049
  0.0755
  0.0398
  0.0249
  0.1528
  0.1167
  0.0886
  0.0540
  0.0726
  0.0736
  0.0797
  0.0854
  0.0609
  0.1263
 [torch.FloatTensor of size 256], Parameter containing:
 -0.0264
  0.0995
 -0.0068
 -0.0877
  0.0078
  0.0407
 -0.0307
  0.0060
  0.0017
  0.0478
  0.0630
  0.0358
 -0.0504
  0.0214
 -0.0090
 -0.0337
 -0.0455
 -0.1924
 -0.0676
  0.0775
 -0.0340
 -0.0799
  0.1314
 -0.1273
 -0.0628
 -0.0055
 -0.0915
 -0.1757
 -0.0083
 -0.0945
  0.0025
 -0.0319
 -0.0158
  0.1437
 -0.0035
  0.0108
 -0.0511
  0.0358
  0.0878
 -0.0452
 -0.0458
  0.0147
  0.0687
  0.0168
 -0.0477
  0.0568
  0.0460
 -0.0507
  0.0059
 -0.1034
  0.0103
 -0.1052
 -0.0166
 -0.0192
 -0.0345
  0.0201
 -0.1362
  0.0396
 -0.0088
 -0.0108
 -0.0298
  0.0721
 -0.0669
 -0.0094
 -0.0310
 -0.0267
 -0.1418
  0.1190
  0.0669
 -0.2137
  0.0427
  0.0478
  0.0339
  0.0001
 -0.1482
 -0.0237
 -0.0743
 -0.0684
 -0.0201
  0.0147
 -0.0396
  0.0194
 -0.0696
 -0.0558
  0.0080
  0.0236
 -0.2578
  0.0064
 -0.1004
  0.0280
  0.0152
 -0.0484
 -0.1536
  0.1049
  0.0499
  0.0657
 -0.0541
  0.0077
  0.0941
 -0.0200
 -0.2356
 -0.0623
  0.0334
  0.1102
  0.0770
 -0.0325
  0.0481
 -0.1499
 -0.1650
  0.1230
  0.0712
  0.0589
 -0.0482
 -0.0972
 -0.1860
  0.0853
 -0.0516
 -0.3080
 -0.0604
 -0.0771
 -0.2728
  0.0289
 -0.1328
  0.0173
 -0.0392
  0.0542
 -0.0372
 -0.1528
 -0.1766
  0.0839
  0.0693
 -0.0826
  0.1118
 -0.0508
 -0.0448
 -0.0375
  0.0304
 -0.3782
  0.0149
  0.0068
 -0.0521
 -0.2950
  0.0899
  0.0296
  0.0199
 -0.0835
 -0.0964
 -0.0238
  0.0349
 -0.2663
 -0.1618
  0.0736
  0.0276
 -0.1109
 -0.0103
 -0.0975
  0.0140
  0.0108
  0.0784
  0.0131
 -0.0395
  0.0248
 -0.0774
 -0.0284
  0.0104
 -0.0423
 -0.1663
 -0.0949
 -0.0343
  0.0455
 -0.3000
 -0.0069
  0.0141
 -0.2615
 -0.0736
 -0.1063
 -0.0105
 -0.0712
 -0.1034
 -0.0298
 -0.1428
 -0.0517
 -0.0571
 -0.0544
 -0.0423
 -0.0085
  0.0159
 -0.0654
 -0.0613
 -0.1450
  0.0399
  0.0816
 -0.0078
 -0.0341
  0.0320
 -0.0448
 -0.0703
  0.1021
 -0.1799
 -0.2117
 -0.0598
 -0.1160
  0.0393
 -0.0454
 -0.1845
 -0.1085
  0.0558
 -0.0636
  0.0168
  0.0002
  0.0799
 -0.0672
  0.0798
 -0.0040
 -0.1902
  0.0200
  0.0732
  0.1032
 -0.0264
  0.0240
 -0.0442
  0.0229
  0.0234
 -0.0235
  0.0105
 -0.2149
 -0.1281
 -0.0183
 -0.0006
 -0.0516
  0.0566
 -0.0543
  0.0141
 -0.0499
  0.0673
  0.0517
 -0.0040
  0.0351
  0.0828
  0.0100
  0.0592
 -0.2043
 -0.0762
  0.0414
  0.0775
  0.0760
 -0.1592
 -0.0836
 -0.1663
  0.0023
 -0.0685
  0.0381
 -0.0987
 -0.0203
  0.0154
 -0.1055
 [torch.FloatTensor of size 256], Parameter containing:
 ( 0 , 0 ,.,.) = 
   4.8367e-02  4.8045e-02  3.8471e-02
   4.9888e-02  5.5208e-02  5.6701e-02
   2.4192e-02  1.3436e-02  2.4655e-02
 
 ( 0 , 1 ,.,.) = 
  -3.6542e-03 -3.1100e-03  4.9227e-03
  -1.2114e-03  3.4020e-03  1.9846e-02
  -2.1704e-02 -2.1158e-02 -2.8686e-03
 
 ( 0 , 2 ,.,.) = 
  -1.2536e-02 -2.0486e-02 -2.3154e-02
  -1.3515e-02 -2.3781e-02 -2.5515e-02
   1.0584e-02  7.2999e-03 -5.2329e-03
     ... 
 
 ( 0 ,253,.,.) = 
  -4.3596e-02 -1.8328e-02 -5.0577e-02
   1.6590e-02  5.0719e-02  2.1919e-02
  -1.9203e-02 -8.8315e-03 -2.0335e-02
 
 ( 0 ,254,.,.) = 
  -7.6949e-03 -1.5848e-02  1.5841e-03
  -6.2470e-03 -1.3135e-02  6.9092e-03
  -3.3791e-03  1.7889e-03  3.7373e-03
 
 ( 0 ,255,.,.) = 
  -6.6310e-03  5.8503e-03 -5.8571e-04
  -2.4600e-02 -8.9747e-03 -7.2466e-03
  -1.7566e-02 -8.5829e-03 -7.5220e-03
       ⋮  
 
 ( 1 , 0 ,.,.) = 
  -2.3679e-02 -9.4399e-03 -1.1688e-02
  -2.4777e-02 -1.7326e-02 -3.1489e-02
  -3.3683e-03  9.7571e-03 -5.1527e-03
 
 ( 1 , 1 ,.,.) = 
  -3.0809e-02 -4.0685e-02 -2.2731e-02
  -5.1065e-03 -1.6457e-02 -1.8804e-02
   5.0382e-02  5.2054e-02  3.9185e-02
 
 ( 1 , 2 ,.,.) = 
  -3.7790e-02 -4.2234e-02 -2.9703e-02
  -6.4766e-03  2.6967e-03 -8.1736e-03
   3.7747e-02  5.5416e-02  2.5806e-02
     ... 
 
 ( 1 ,253,.,.) = 
  ... (output truncated — the full parameter dump runs for thousands of lines)
  The remainder of the printout repeats the same pattern for the deeper ResNet layers:
  3x3 conv weights of size 256x256x3x3 and 512x512x3x3, a 512x256x3x3 conv and a
  512x256x1x1 downsample conv, each followed by its 256- or 512-element batch-norm
  weight and bias vectors ([torch.FloatTensor of size 256] / [torch.FloatTensor of size 512]).
  0.3041
  0.2837
  0.3276
  0.2392
  0.3668
  0.1971
  0.2946
  0.3613
  0.2736
  0.2554
  0.2860
  0.2511
  0.3490
  0.3253
  0.2934
  0.2027
  0.2580
  0.2200
  0.3089
  0.3074
  0.3332
  0.2943
  0.3375
  0.2330
  0.2611
  0.3383
  0.2837
  0.3546
  0.3093
  0.3791
  0.2197
  0.2648
  0.2830
  0.2587
  0.3588
  0.2830
  0.3971
  0.3194
  0.3066
  0.2754
  0.2647
  0.0970
  0.2182
  0.2334
  0.2624
  0.1829
  0.2933
  0.2747
  0.3001
  0.2996
  0.3107
  0.3256
  0.2940
  0.3901
  0.2790
  0.3030
  0.2838
  0.3010
  0.3044
  0.3479
  0.3087
  0.2611
  0.1958
  0.2941
  0.2558
  0.2889
  0.3148
  0.2516
  0.2664
  0.2862
  0.3940
  0.2933
  0.2781
  0.3796
  0.3022
  0.2583
  0.3021
  0.2784
  0.2967
  0.2994
  0.3856
  0.3277
  0.2587
  0.2539
  0.2824
  0.2634
  0.1489
  0.2205
  0.3929
  0.3401
  0.2717
  0.2789
  0.2917
  0.3177
  0.1992
  0.3684
  0.3120
  0.3201
  0.2810
  0.2302
  0.2779
  0.2865
  0.2858
  0.2713
  0.1601
  0.2496
  0.2895
  0.3154
  0.3443
  0.3285
  0.3444
  0.3251
  0.3235
  0.3375
  0.2282
  0.2128
  0.1795
  0.3077
  0.3005
  0.2775
  0.3054
  0.2914
  0.3535
  0.2871
  0.2669
  0.3961
  0.2674
  0.3898
  0.3183
  0.3242
  0.2789
  0.1911
  0.2569
  0.3427
  0.2464
  0.2778
  0.2098
  0.3019
  0.3145
  0.3271
  0.2914
  0.2619
  0.2643
  0.3039
  0.2520
  0.2099
  0.3643
  0.2915
  0.1957
  0.3286
  0.2355
  0.3210
  0.2982
  0.3388
  0.3450
  0.3716
  0.2898
  0.2846
  0.2805
  0.2219
  0.2910
  0.2681
  0.3163
  0.1964
  0.3176
  0.3092
  0.2706
  0.2505
  0.2508
  0.3166
  0.3583
  0.1563
  0.2608
  0.2892
  0.3401
  0.2891
  0.3126
  0.2172
  0.2459
  0.2651
  0.4052
  0.2986
  0.3026
  0.3773
  0.2262
  0.2675
  0.2900
  0.3759
  0.3201
  0.2567
  0.3443
  0.2348
  0.3057
  0.2347
  0.3277
  0.2938
  0.2746
  0.2805
  0.2421
  0.3590
  0.2622
  0.2773
  0.2396
  0.2134
  0.2727
  0.2984
  0.2744
  0.2591
  0.2628
  0.3568
  0.2009
  0.3220
  0.2868
  0.2561
  0.3113
  0.2138
  0.3136
  0.2745
  0.3046
  0.3042
  0.1972
  0.2815
  0.2542
  0.2983
  0.2613
  0.2668
  0.3142
  0.2930
  0.3800
  0.1966
  0.2948
  0.3363
  0.2713
  0.3625
  0.2909
  0.2695
  0.3111
  0.3242
  0.3009
  0.3231
  0.3051
  0.2012
  0.2716
  0.3692
  0.2694
  0.1481
  0.2858
  0.2819
  0.2391
  0.2867
  0.3466
  0.3431
  0.2365
  0.3357
  0.1685
  0.2925
  0.3092
  0.3127
  0.1883
  0.2561
  0.3086
  0.1732
  0.2989
  0.3235
  0.2693
  0.2630
  0.2913
  0.2786
  0.3124
  0.3098
  0.2695
  0.2403
  0.2906
  0.2784
  0.2654
  0.3485
  0.3939
  0.3033
  0.3145
  0.2622
  0.1540
  0.2790
  0.2967
  0.1954
  0.2632
  0.2957
  0.2581
  0.3231
  0.2795
  0.2859
  0.3139
  0.2488
  0.2404
  0.3714
  0.2649
  0.2267
  0.2878
  0.3462
  0.3063
  0.3180
  0.1726
  0.3153
  0.2625
  0.3020
  0.2996
  0.3632
  0.1541
  0.3192
  0.2200
  0.2894
  0.2622
  0.2534
  0.2935
  0.3208
  0.2231
  0.2743
  0.3023
  0.2829
  0.2394
  0.2506
  0.3512
  0.3366
  0.2666
  0.2930
  0.3049
  0.2321
  0.3397
  0.2727
  0.2900
  0.3146
  0.2682
  0.3094
  0.3718
  0.3387
  0.3202
  0.2423
  0.2745
  0.2966
  0.2500
  0.2329
  0.3419
  0.2928
  0.3536
  0.3739
  0.1935
  0.2670
  0.2846
  0.2583
  0.3783
  0.2826
  0.2929
  0.2728
  0.3645
  0.2770
  0.2756
  0.2523
  0.2500
 [torch.FloatTensor of size 512], Parameter containing:
 -0.1668
 -0.3019
 -0.2187
 -0.2917
 -0.1971
 -0.2325
 -0.1869
 -0.1857
 -0.2474
 -0.1629
 -0.2448
 -0.2508
 -0.1895
 -0.2651
 -0.3250
 -0.3811
 -0.2953
 -0.4963
 -0.0294
 -0.2724
 -0.2007
 -0.2220
 -0.2945
 -0.2579
 -0.5152
 -0.1994
 -0.5016
 -0.2736
 -0.4528
 -0.3968
 -0.2281
 -0.1772
 -0.1293
 -0.2655
 -0.3252
 -0.3232
 -0.3337
 -0.1901
 -0.0692
 -0.2196
 -0.2132
 -0.2565
 -0.1646
 -0.1567
 -0.2087
 -0.2178
 -0.2480
 -0.2767
 -0.3071
 -0.1988
 -0.1985
 -0.0235
 -0.2458
 -0.4156
 -0.1660
 -0.1923
 -0.3328
 -0.1481
 -0.3047
 -0.2277
 -0.3182
 -0.2744
 -0.1643
 -0.3365
 -0.4050
 -0.2082
 -0.0621
 -0.2671
 -0.2809
 -0.2185
 -0.2148
 -0.4465
 -0.3376
 -0.3213
 -0.2921
 -0.1998
 -0.3369
 -0.2092
 -0.2831
 -0.2893
 -0.1719
 -0.2189
 -0.4016
 -0.2484
 -0.2070
 -0.3849
 -0.3753
 -0.5874
 -0.1637
 -0.1748
 -0.2217
 -0.5067
 -0.2496
 -0.2117
 -0.4291
 -0.1944
 -0.3089
 -0.2621
 -0.4096
 -0.0602
 -0.2009
 -0.1316
 -0.3336
 -0.2627
 -0.2320
 -0.0910
 -0.1560
 -0.2889
 -0.3286
 -0.1628
 -0.5128
 -0.2036
 -0.1726
 -0.1844
 -0.2285
 -0.1925
 -0.3432
 -0.0929
 -0.3138
 -0.1912
 -0.1926
 -0.0342
 -0.3268
 -0.1699
 -0.0828
 -0.2417
 -0.2069
 -0.3870
 -0.2210
 -0.2867
 -0.0526
 -0.3092
 -0.0655
 -0.2594
 -0.2160
 -0.5062
 -0.2905
 -0.2125
 -0.3124
 -0.2128
 -0.1946
 -0.2520
 -0.5475
 -0.2321
 -0.3350
 -0.3473
 -0.2158
 -0.3603
 -0.0759
 -0.1472
 -0.0327
 -0.1404
 -0.3128
 -0.3063
 -0.1120
 -0.2664
 -0.2700
 -0.3112
 -0.1519
 -0.3843
 -0.0645
 -0.2373
 -0.4227
 -0.2546
 -0.1611
 -0.2350
 -0.1524
 -0.3494
 -0.3453
 -0.2081
 -0.0918
 -0.2025
 -0.1246
 -0.2533
 -0.2768
 -0.3156
 -0.2530
 -0.3957
 -0.0981
 -0.1257
 -0.3697
 -0.2333
 -0.3664
 -0.2829
 -0.4320
 -0.0836
 -0.1583
 -0.2395
 -0.1818
 -0.4408
 -0.2376
 -0.4450
 -0.3232
 -0.2787
 -0.1858
 -0.2137
  0.0481
 -0.1058
 -0.1093
 -0.2035
 -0.0496
 -0.2117
 -0.1598
 -0.2389
 -0.2830
 -0.2878
 -0.3406
 -0.2560
 -0.4468
 -0.2444
 -0.2492
 -0.2222
 -0.2792
 -0.3005
 -0.4180
 -0.2568
 -0.1872
 -0.0270
 -0.2645
 -0.1873
 -0.3022
 -0.3400
 -0.1803
 -0.1810
 -0.2079
 -0.4775
 -0.2047
 -0.1878
 -0.4504
 -0.2516
 -0.1657
 -0.2765
 -0.2329
 -0.2446
 -0.2956
 -0.4163
 -0.2816
 -0.1571
 -0.2199
 -0.2125
 -0.1684
  0.0356
 -0.0914
 -0.4484
 -0.3535
 -0.2212
 -0.2550
 -0.2509
 -0.2702
 -0.0599
 -0.3505
 -0.2924
 -0.2360
 -0.2339
 -0.1259
 -0.2597
 -0.2267
 -0.1978
 -0.1371
 -0.0129
 -0.1175
 -0.2527
 -0.3099
 -0.3231
 -0.3468
 -0.3553
 -0.3537
 -0.3315
 -0.3713
 -0.1091
 -0.0959
 -0.0258
 -0.2756
 -0.2808
 -0.2012
 -0.2812
 -0.1991
 -0.3948
 -0.2257
 -0.2469
 -0.4211
 -0.2110
 -0.4670
 -0.3069
 -0.3549
 -0.2337
 -0.0612
 -0.1321
 -0.2968
 -0.1870
 -0.2316
 -0.0686
 -0.3113
 -0.2895
 -0.3149
 -0.2686
 -0.2081
 -0.2096
 -0.3011
 -0.1810
 -0.0227
 -0.3873
 -0.2665
 -0.0225
 -0.2973
 -0.0973
 -0.2980
 -0.3219
 -0.2926
 -0.3196
 -0.4332
 -0.1980
 -0.2117
 -0.2302
 -0.0980
 -0.2344
 -0.2154
 -0.2921
 -0.0350
 -0.3361
 -0.2620
 -0.2188
 -0.1566
 -0.1795
 -0.2726
 -0.4103
  0.0413
 -0.1507
 -0.2552
 -0.3137
 -0.2466
 -0.2961
 -0.0938
 -0.1481
 -0.2129
 -0.5480
 -0.2915
 -0.2802
 -0.5077
 -0.1306
 -0.1862
 -0.2400
 -0.4362
 -0.3017
 -0.1633
 -0.3447
 -0.1047
 -0.2846
 -0.1244
 -0.3036
 -0.2404
 -0.2333
 -0.2494
 -0.1866
 -0.3294
 -0.1677
 -0.2540
 -0.1295
 -0.0512
 -0.1966
 -0.2801
 -0.1702
 -0.1879
 -0.1850
 -0.3274
 -0.0369
 -0.2979
 -0.2612
 -0.1889
 -0.3270
 -0.1377
 -0.2787
 -0.2201
 -0.2417
 -0.2834
 -0.0555
 -0.2538
 -0.1040
 -0.2660
 -0.1644
 -0.1723
 -0.2672
 -0.2797
 -0.4214
 -0.0378
 -0.2386
 -0.3498
 -0.2435
 -0.4348
 -0.2554
 -0.1719
 -0.2836
 -0.3316
 -0.2787
 -0.2879
 -0.2640
 -0.0560
 -0.1789
 -0.4195
 -0.2152
  0.0567
 -0.2359
 -0.2249
 -0.0911
 -0.2644
 -0.3875
 -0.3317
 -0.1415
 -0.3425
 -0.0020
 -0.1941
 -0.2821
 -0.2809
 -0.0965
 -0.1841
 -0.2971
 -0.0173
 -0.3043
 -0.3013
 -0.1729
 -0.1872
 -0.2683
 -0.2033
 -0.3059
 -0.2939
 -0.2163
 -0.1889
 -0.2581
 -0.2296
 -0.2066
 -0.3462
 -0.4298
 -0.2600
 -0.3095
 -0.1800
 -0.0116
 -0.2124
 -0.2552
 -0.0523
 -0.2216
 -0.2605
 -0.2134
 -0.2867
 -0.2556
 -0.2275
 -0.3437
 -0.1698
 -0.1560
 -0.4120
 -0.2067
 -0.1159
 -0.2408
 -0.3093
 -0.2621
 -0.2593
 -0.0135
 -0.3099
 -0.2179
 -0.2766
 -0.2400
 -0.3934
  0.0072
 -0.2982
 -0.0930
 -0.2166
 -0.1635
 -0.1827
 -0.2308
 -0.2525
 -0.0991
 -0.2325
 -0.2938
 -0.2480
 -0.0934
 -0.1911
 -0.3772
 -0.3369
 -0.1606
 -0.2752
 -0.3005
 -0.1372
 -0.2990
 -0.2156
 -0.2622
 -0.3160
 -0.1342
 -0.2903
 -0.3865
 -0.2916
 -0.3243
 -0.2051
 -0.2656
 -0.2359
 -0.1508
 -0.1063
 -0.3595
 -0.2312
 -0.3046
 -0.4178
 -0.0276
 -0.2204
 -0.2426
 -0.1616
 -0.4789
 -0.1713
 -0.2802
 -0.2305
 -0.4327
 -0.2413
 -0.1862
 -0.1486
 -0.1507
 [torch.FloatTensor of size 512], Parameter containing:
 ( 0 , 0 ,.,.) = 
   2.8729e-04  4.2632e-03 -2.0266e-03
   1.9513e-04  2.4381e-03 -5.8632e-03
   4.4803e-03  8.6577e-03  8.5538e-04
 
 ( 0 , 1 ,.,.) = 
  -1.1335e-02 -1.3195e-02 -1.0305e-02
  -4.9507e-03 -4.5898e-03 -3.1041e-03
  -7.5883e-03 -8.3795e-03 -8.9239e-03
 
 ( 0 , 2 ,.,.) = 
  -1.1914e-02 -1.2104e-02 -1.0167e-02
  -1.2093e-02 -1.1557e-02 -8.9600e-03
  -1.2515e-02 -9.3296e-03 -6.4079e-03
     ... 
 
 ( 0 ,509,.,.) = 
  -9.3573e-03 -1.0662e-02 -1.2672e-02
  -8.0600e-03 -8.5423e-03 -1.2121e-02
  -8.1498e-03 -8.8037e-03 -1.0611e-02
 
 ( 0 ,510,.,.) = 
   4.2632e-03  5.6461e-03  2.8460e-03
   4.7070e-03  6.2550e-03  7.5862e-03
   1.1504e-02  1.1518e-02  1.0728e-02
 
 ( 0 ,511,.,.) = 
  -6.2455e-03 -9.1693e-03 -9.6664e-03
  -4.2935e-03 -6.5311e-03 -5.0513e-03
  -3.1141e-03 -5.0124e-03 -5.8122e-03
       ⋮  
 
 ( 1 , 0 ,.,.) = 
   2.7483e-03  3.7146e-04  3.3262e-05
  -4.5675e-03 -6.6689e-03 -6.4447e-03
  -6.7610e-03 -7.3204e-03 -9.5855e-03
 
 ( 1 , 1 ,.,.) = 
  -1.4630e-02 -1.2320e-02 -1.4457e-02
  -8.6197e-03 -5.8059e-03 -1.1075e-02
  -6.2154e-03 -6.8218e-03 -9.3805e-03
 
 ( 1 , 2 ,.,.) = 
   1.0879e-03  4.3850e-04 -1.9456e-03
  -1.2517e-03  3.2917e-04 -2.1435e-03
   4.8136e-03  2.5333e-03  5.1504e-03
     ... 
 
 ( 1 ,509,.,.) = 
   2.4644e-02  1.7434e-02  2.0734e-02
   2.3101e-02  1.3487e-02  2.0728e-02
   1.9381e-02  1.5243e-02  1.7340e-02
 
 ( 1 ,510,.,.) = 
   1.2212e-02  1.2448e-02  1.5048e-02
   5.2993e-03  4.0090e-03  9.3927e-03
   6.6766e-03  2.4941e-03  8.3288e-03
 
 ( 1 ,511,.,.) = 
   3.1040e-02  2.8243e-02  3.2319e-02
   3.8608e-02  3.3099e-02  3.8652e-02
   2.5839e-02  2.6524e-02  2.4995e-02
       ⋮  
 
 ( 2 , 0 ,.,.) = 
  -2.1761e-03  4.5553e-03  2.0612e-03
   4.9747e-03  1.1420e-02  8.5734e-03
   4.8583e-03  1.1469e-02  1.0039e-02
 
 ( 2 , 1 ,.,.) = 
  -6.2547e-05  6.5336e-04  9.4747e-04
   5.0603e-03  7.7136e-03  6.5484e-03
  -4.8432e-04  2.3057e-03  2.9219e-03
 
 ( 2 , 2 ,.,.) = 
  -3.2788e-02 -2.7615e-02 -3.2608e-02
  -3.6296e-02 -2.8170e-02 -3.0277e-02
  -3.6814e-02 -3.1547e-02 -3.0231e-02
     ... 
 
 ( 2 ,509,.,.) = 
  -5.2998e-03 -2.8590e-04 -4.9266e-03
  -7.0530e-03 -2.3684e-04 -1.5838e-03
  -6.9291e-03  4.8084e-04 -3.1548e-03
 
 ( 2 ,510,.,.) = 
   1.1854e-02  8.4836e-03  1.3839e-02
   2.8741e-03 -9.7358e-05  4.4888e-03
  -2.5515e-03 -2.7788e-03 -3.2464e-03
 
 ( 2 ,511,.,.) = 
  -1.2408e-02 -1.5001e-02 -1.3377e-02
  -1.4540e-02 -1.8537e-02 -1.7392e-02
  -6.7315e-03 -9.5205e-03 -9.0692e-03
 ...     
       ⋮  
 
 (509, 0 ,.,.) = 
   3.0369e-03  1.9542e-03  1.7140e-03
  -7.6240e-03 -2.8765e-03 -5.1760e-03
  -9.3019e-03 -4.8800e-03 -4.2932e-03
 
 (509, 1 ,.,.) = 
   4.4836e-03  2.4909e-03  1.5746e-03
   1.2065e-02  1.2936e-02  1.0344e-02
   1.9010e-02  1.7459e-02  1.5988e-02
 
 (509, 2 ,.,.) = 
  -1.4914e-03 -8.1727e-03 -8.0671e-03
  -6.6247e-03 -6.2421e-03 -9.2717e-03
  -8.7991e-03 -7.7528e-03 -8.6336e-03
     ... 
 
 (509,509,.,.) = 
  -1.8040e-02 -1.5366e-02 -1.5334e-02
  -1.3148e-02 -1.2180e-02 -1.0915e-02
  -1.4545e-02 -1.4756e-02 -1.1787e-02
 
 (509,510,.,.) = 
   3.5762e-03  6.6073e-03 -1.4055e-03
   4.3975e-03  7.8375e-03  8.8085e-05
  -5.0697e-03 -5.6633e-04 -5.9284e-03
 
 (509,511,.,.) = 
  -1.9234e-03 -8.8012e-03 -5.8821e-03
   3.6685e-03 -1.3784e-03 -3.2117e-03
  -4.7037e-04  1.5340e-04 -3.4046e-03
       ⋮  
 
 (510, 0 ,.,.) = 
  -1.8305e-02 -1.7735e-02 -2.1683e-02
  -1.6598e-02 -1.2508e-02 -2.0530e-02
  -1.0800e-02 -9.8670e-03 -1.7195e-02
 
 (510, 1 ,.,.) = 
   2.0721e-02  2.2466e-02  2.5049e-02
   1.8682e-02  1.3160e-02  2.3696e-02
   2.2104e-02  1.7261e-02  2.4877e-02
 
 (510, 2 ,.,.) = 
  -5.7091e-03 -2.6876e-03 -9.2260e-04
  -9.4530e-03 -7.0543e-03 -6.2770e-03
  -4.5806e-03 -2.7182e-03 -2.5823e-03
     ... 
 
 (510,509,.,.) = 
   2.4150e-02  1.4002e-02  1.6559e-02
   2.1363e-02  1.4359e-02  1.5854e-02
   2.5786e-02  2.7233e-02  2.5104e-02
 
 (510,510,.,.) = 
  -4.6450e-03  1.2419e-03 -1.8768e-03
   1.3005e-03  4.0888e-03 -6.5483e-04
  -7.9783e-03 -6.6539e-03 -8.9957e-03
 
 (510,511,.,.) = 
   1.1494e-02  2.6621e-02  1.5649e-02
   6.5960e-03  1.7290e-02  7.5466e-03
  -8.0256e-03  4.6246e-03 -5.7808e-03
       ⋮  
 
 (511, 0 ,.,.) = 
   1.4232e-02  1.1769e-02  9.4342e-03
   6.2592e-03  5.1087e-03  2.3311e-03
  -1.9694e-03  2.7110e-03 -2.8945e-03
 
 (511, 1 ,.,.) = 
  -7.0772e-03  1.0365e-03 -5.8451e-03
  -9.1879e-03 -3.1388e-03 -8.1517e-03
  -8.0300e-03 -5.1313e-03 -9.5734e-03
 
 (511, 2 ,.,.) = 
   2.4314e-02  1.8942e-02  2.4256e-02
   2.0090e-02  1.1472e-02  1.5993e-02
   2.2910e-02  2.0622e-02  2.3820e-02
     ... 
 
 (511,509,.,.) = 
  -1.6375e-02 -1.6928e-02 -1.9019e-02
  -9.7367e-03 -1.1274e-02 -1.0261e-02
  -1.2310e-02 -1.5931e-02 -1.4151e-02
 
 (511,510,.,.) = 
   4.7098e-03 -4.5205e-04  2.8042e-03
   2.1428e-03 -4.6175e-03 -1.6818e-03
  -1.3336e-03 -5.5009e-03 -2.6237e-03
 
 (511,511,.,.) = 
  -1.4367e-02 -1.3520e-02 -1.1387e-02
  -4.7420e-03 -1.7309e-03 -2.6426e-03
   5.1448e-03  7.0428e-03  5.0202e-03
 [torch.FloatTensor of size 512x512x3x3], Parameter containing:
  1.8419
  1.8307
  1.7650
  1.8288
  1.9505
  1.8026
  1.9536
  2.2790
  1.7662
  1.8902
  1.7768
  1.7749
  1.9055
  1.7328
  1.8762
  1.8211
  1.7967
  2.3428
  1.7985
  1.7271
  1.7915
  1.9512
  1.8928
  1.9017
  1.8784
  1.9809
  1.8569
  1.7830
  1.8911
  1.8859
  1.7764
  1.9832
  1.8389
  1.7616
  1.8728
  1.8753
  1.9008
  1.8209
  1.7039
  1.7377
  1.7786
  1.6944
  1.7829
  1.7815
  1.7594
  1.8428
  1.9238
  2.0871
  1.8980
  1.8413
  1.8471
  1.8584
  1.7640
  1.8453
  1.7606
  1.9504
  1.9620
  1.8755
  1.9424
  1.8731
  1.8674
  1.9422
  1.8750
  1.9208
  1.7464
  1.8558
  1.6539
  2.0660
  2.0298
  1.9174
  1.8972
  1.7589
  1.7551
  1.9560
  1.7909
  1.7971
  1.7851
  1.7733
  1.8061
  1.7949
  1.8169
  1.8089
  1.8641
  2.1542
  1.7739
  1.7913
  1.8022
  1.7155
  1.7679
  1.7704
  1.6266
  1.8645
  1.9076
  1.8576
  1.6924
  1.8020
  1.7100
  1.7713
  1.8572
  1.7103
  2.0664
  1.9054
  1.9422
  1.8078
  1.7412
  1.6061
  1.9105
  1.8947
  1.7954
  1.8989
  1.8239
  1.7619
  1.7951
  1.8149
  1.8539
  1.8502
  1.7095
  2.1831
  1.8599
  1.8252
  1.8193
  1.8460
  1.7968
  1.6229
  1.8450
  1.8290
  1.8706
  1.9293
  1.6881
  1.9725
  1.8981
  1.8925
  1.8851
  1.8445
  1.9764
  2.0674
  1.8384
  1.8414
  1.8762
  1.7931
  1.7131
  1.9644
  1.7854
  1.9369
  1.8972
  1.8940
  1.8700
  1.7967
  1.8775
  1.9409
  1.7391
  1.7944
  1.9678
  1.7678
  1.6851
  1.9414
  1.9663
  1.9882
  1.7915
  1.8141
  1.8325
  2.1200
  1.9256
  2.3592
  2.0304
  1.9594
  1.7334
  1.9048
  1.8221
  1.7811
  1.9084
  1.8053
  1.9171
  1.9644
  1.8256
  1.6432
  1.9173
  1.9094
  1.9923
  1.7963
  1.9077
  1.7619
  2.1724
  1.7931
  1.7564
  1.8889
  1.9832
  1.9136
  1.8035
  1.8419
  1.8278
  1.8057
  1.9063
  1.8646
  1.7848
  1.8230
  1.7986
  1.7091
  1.7724
  1.7939
  1.7611
  1.9325
  2.0162
  1.7295
  2.0196
  1.8876
  1.8325
  1.8225
  1.7870
  1.9160
  1.7197
  1.7170
  1.9133
  1.7770
  1.9943
  1.8389
  1.8070
  1.8516
  1.7857
  1.9648
  1.9553
  1.9232
  1.8086
  1.8114
  1.7141
  1.8058
  1.8532
  1.9255
  1.7682
  1.8314
  1.8495
  1.8296
  1.8278
  1.8819
  1.7698
  1.7838
  1.7807
  1.9974
  1.6994
  1.9483
  1.7793
  1.8029
  2.2210
  1.6455
  1.8357
  2.1706
  1.9204
  1.7414
  1.7809
  1.8648
  1.9145
  1.8849
  1.8346
  1.9368
  1.8169
  2.2302
  1.8262
  2.0651
  1.9888
  1.8169
  1.8462
  1.9681
  1.8083
  1.8595
  1.8539
  1.7699
  1.9001
  1.7285
  1.7553
  1.8924
  1.7829
  1.9428
  1.8724
  1.7228
  2.0548
  1.7732
  1.8561
  1.7699
  1.9269
  1.8171
  2.4075
  1.7257
  1.7819
  1.7244
  1.8521
  1.8302
  1.8797
  1.7617
  1.9650
  1.9807
  1.7102
  1.7486
  1.8350
  1.9919
  1.8505
  1.9000
  1.8269
  1.9787
  1.7635
  1.6071
  1.7998
  1.9545
  1.7348
  1.7140
  1.8851
  1.7981
  1.9100
  1.8315
  1.7864
  1.9165
  1.8839
  1.9017
  1.9334
  1.7405
  1.7661
  1.8015
  1.9987
  1.7622
  1.9107
  1.8444
  1.7128
  1.8726
  1.8529
  1.9270
  1.8769
  1.7261
  1.8393
  1.9075
  1.7953
  1.8246
  1.7605
  2.0470
  1.9221
  1.9205
  1.8910
  1.7666
  1.6801
  1.8308
  1.8845
  1.8339
  1.8238
  1.7616
  1.6114
  1.8411
  1.7437
  1.8423
  1.9540
  1.7465
  1.7741
  1.8746
  1.8856
  1.7740
  1.7603
  1.7682
  1.8396
  1.6869
  1.8080
  1.8836
  1.8283
  1.8341
  1.8522
  1.9749
  1.8707
  1.7719
  1.8993
  1.8108
  1.8480
  1.8267
  1.8731
  1.9576
  1.8347
  1.9509
  1.9641
  1.7997
  1.7652
  1.9253
  1.7126
  1.7551
  1.9427
  1.8559
  1.9163
  1.7681
  1.7803
  1.8500
  1.8535
  1.8865
  1.7599
  2.0692
  1.8021
  1.7077
  1.8890
  1.9457
  1.8516
  1.7882
  1.8356
  1.8472
  1.6708
  1.7435
  1.9080
  1.9653
  2.0401
  1.8935
  1.8450
  1.7536
  1.7733
  1.8135
  1.8534
  1.9368
  1.7348
  1.8738
  1.9632
  1.9033
  1.7422
  1.7842
  1.8516
  2.0218
  1.7044
  1.8793
  1.8655
  1.8516
  1.8002
  1.8687
  1.8460
  1.7589
  1.8174
  1.9830
  1.9034
  2.1222
  1.8460
  1.9209
  1.8893
  1.9422
  1.8489
  1.8396
  1.9953
  2.0865
  1.8253
  1.7700
  1.8035
  1.7535
  1.8923
  1.8620
  1.8627
  1.7264
  1.8140
  1.9613
  1.8812
  1.8729
  2.0050
  1.7092
  1.7726
  1.9410
  1.8381
  1.8366
  1.7276
  1.8796
  1.7548
  1.9536
  1.8062
  1.8883
  2.0278
  1.8775
  1.9446
  1.8676
  1.8423
  1.7798
  1.9403
  1.8375
  2.0473
  1.9507
  1.8337
  1.8184
  1.7791
  1.8993
  1.8781
  1.8691
  1.8493
  1.7623
  1.9458
  1.7564
  1.7448
  1.8633
  1.6863
  1.8062
  1.8702
  2.0048
  1.8504
  1.8964
  1.9489
  1.8264
  1.9019
  1.8196
  1.9712
  1.8969
  1.8652
  1.8709
  1.6984
  1.8677
  1.8846
  1.9256
  1.8620
  1.6366
  1.8434
  1.7506
  1.8438
  1.5788
  1.9316
  1.9535
  1.7878
  1.7354
  2.0920
  1.9456
 [torch.FloatTensor of size 512], Parameter containing:
  0.2371
  0.3433
  0.3279
  0.4642
  0.2233
  0.2370
  0.2176
  0.3793
  0.3140
  0.2803
  0.2434
  0.2116
  0.2478
  0.2435
  0.2298
  0.3172
  0.2725
  0.6511
  0.2925
  0.2281
  0.2279
  0.4254
  0.2342
  0.3328
  0.2632
  0.2176
  0.3180
  0.3893
  0.1387
  0.2274
  0.3379
  0.0767
  0.2253
  0.2504
  0.1990
  0.1951
  0.2566
  0.3253
  0.2797
  0.3149
  0.2373
  0.2533
  0.1956
  0.3236
  0.2093
  0.2333
  0.2300
  0.5019
  0.2830
  0.1885
  0.3264
  0.2722
  0.2369
  0.2430
  0.3625
  0.2165
  0.4700
  0.3047
  0.3675
  0.2641
  0.1979
  0.2664
  0.3448
  0.2005
  0.2450
  0.4351
  0.2689
  0.1632
  0.3087
  0.1209
  0.2153
  0.1592
  0.2960
  0.1423
  0.2951
  0.2706
  0.2007
  0.2939
  0.2210
  0.2243
  0.2465
  0.3910
  0.4599
  0.5417
  0.2147
  0.3469
  0.2703
  0.2229
  0.3645
  0.2647
  0.2421
  0.2492
  0.1666
  0.2763
  0.2560
  0.2151
  0.3363
  0.2767
  0.2516
  0.2988
  0.2622
  0.3499
  0.3001
  0.3907
  0.3184
  0.2233
  0.2649
  0.2110
  0.2034
  0.2752
  0.2314
  0.3480
  0.2238
  0.2892
  0.1991
  0.2923
  0.3259
  0.0722
  0.3039
  0.3041
  0.3803
  0.2568
  0.2382
  0.3057
  0.2652
  0.1532
  0.2110
  0.2567
  0.3148
  0.2746
  0.1833
  0.1950
  0.1116
  0.2279
  0.3705
  0.2477
  0.2000
  0.3060
  0.2548
  0.2468
  0.3028
  0.1921
  0.2952
  0.1980
  0.2135
  0.1583
  0.1586
  0.3944
  0.2352
  0.3947
  0.2740
  0.2861
  0.1856
  0.2702
  0.2986
  0.1728
  0.2658
  0.2696
  0.2028
  0.1838
  0.3176
  0.6246
  0.2631
  0.3855
  0.2074
  0.2317
  0.4171
  0.2044
  0.2926
  0.3506
  0.2305
  0.2400
  0.1420
  0.1093
  0.2757
  0.3253
  0.2334
  0.1650
  0.4026
  0.2066
  0.1790
  0.3032
  0.5658
  0.3246
  0.3834
  0.3254
  0.1772
  0.2909
  0.2350
  0.2519
  0.1968
  0.2003
  0.3213
  0.4802
  0.2543
  0.2578
  0.3280
  0.2270
  0.3044
  0.2273
  0.2447
  0.2527
  0.4136
  0.2588
  0.3589
  0.2688
  0.2115
  0.2022
  0.3186
  0.3740
  0.1785
  0.2074
  0.2346
  0.3566
  0.2623
  0.2620
  0.2880
  0.1462
  0.1896
  0.2777
  0.1852
  0.3240
  0.2748
  0.2164
  0.3066
  0.1845
  0.3992
  0.1695
  0.4411
  0.2812
  0.2730
  0.2784
  0.1861
  0.3589
  0.1934
  0.3320
  0.3350
  0.2655
  0.2740
  0.3185
  0.2633
  0.2458
  0.2003
  0.2809
  0.3049
  0.2050
  0.2904
  0.2381
  0.3278
  0.3484
  0.4293
  0.2422
  0.2859
  0.1864
  0.2954
  0.5634
  0.2081
  0.3743
  0.2902
  0.3820
  0.3069
  0.2101
  0.2750
  0.2878
  0.1870
  0.3015
  0.1661
  0.2998
  0.3101
  0.2522
  0.2419
  0.1758
  0.2681
  0.2812
  0.1495
  0.2868
  0.3157
  0.2587
  0.2437
  0.1467
  0.5416
  0.2490
  0.2831
  0.2783
  0.1614
  0.1963
  0.2034
  0.2364
  0.2527
  0.1573
  0.3184
  0.2841
  0.1613
  0.1489
  0.2850
  0.1625
  0.3277
  0.4936
  0.2780
  0.3178
  0.1743
  0.2158
  0.2222
  0.2821
  0.4267
  0.2713
  0.1778
  0.3067
  0.2270
  0.1772
  0.3897
  0.2923
  0.4843
  0.2345
  0.2327
  0.2740
  0.2700
  0.2804
  0.4035
  0.1501
  0.3329
  0.3286
  0.2803
  0.2309
  0.1738
  0.3270
  0.3097
  0.1808
  0.2384
  0.2107
  0.3240
  0.3346
  0.2236
  0.2061
  0.2687
  0.2360
  0.3338
  0.2694
  0.3203
  0.2895
  0.1884
  0.1491
  0.3957
  0.5167
  0.3407
  0.1854
  0.1816
  0.2626
  0.1855
  0.2219
  0.1482
  0.2584
  0.2458
  0.2616
  0.2396
  0.2402
  0.2423
  0.3463
  0.2731
  0.1524
  0.2514
  0.2760
  0.1734
  0.2715
  0.4052
  0.2252
  0.3676
  0.3070
  0.3127
  0.1836
  0.4330
  0.2203
  0.2073
  0.2803
  0.2984
  0.2191
  0.3272
  0.2267
  0.2749
  0.3056
  0.4566
  0.2962
  0.3528
  0.3236
  0.4220
  0.2715
  0.2256
  0.2903
  0.1829
  0.3994
  0.2820
  0.2471
  0.1647
  0.3654
  0.4504
  0.2685
  0.2992
  0.2825
  0.2435
  0.2212
  0.4300
  0.4342
  0.1988
  0.2863
  0.3398
  0.2444
  0.2905
  0.2559
  0.2586
  0.1702
  0.1906
  0.2536
  0.2978
  0.2498
  0.3777
  0.2252
  0.2472
  0.2243
  0.1732
  0.2194
  0.2091
  0.2820
  0.2898
  0.2887
  0.3292
  0.1644
  0.2962
  0.3279
  0.2535
  0.2795
  0.2238
  0.2607
  0.1937
  0.2680
  0.2418
  0.5193
  0.2502
  0.3147
  0.2166
  0.2313
  0.2027
  0.1880
  0.2180
  0.3826
  0.3871
  0.2358
  0.3556
  0.2272
  0.3272
  0.3442
  0.3154
  0.1993
  0.3135
  0.2254
  0.3048
  0.2658
  0.3337
  0.2679
  0.2670
  0.2363
  0.4347
  0.1931
  0.1995
  0.2072
  0.3202
  0.2667
  0.2305
  0.2383
  0.2246
  0.2562
  0.2837
  0.4046
  0.2786
  0.2243
  0.1591
  0.1923
  0.1894
  0.2496
  0.1140
  0.3128
  0.3197
  0.3530
  0.2999
  0.2115
  0.4718
  0.2979
  0.3472
  0.2890
  0.4740
  0.2230
  0.3630
  0.4015
  0.2446
  0.1897
  0.1460
  0.1874
  0.2734
  0.2366
  0.3001
  0.2359
  0.2688
  0.3256
  0.2749
  0.2848
  0.2299
  0.3001
  0.4818
  0.3074
  0.3164
  0.3114
  0.3549
  0.2859
 [torch.FloatTensor of size 512], Parameter containing:
 1.00000e-02 *
  1.4658 -2.9785 -0.1085  ...   3.6860  3.9901  3.4233
 -0.1535  2.7937 -1.7376  ...  -2.3749  0.5214  3.9412
 [torch.FloatTensor of size 2x512], Parameter containing:
 1.00000e-02 *
   1.5492
   1.7279
 [torch.FloatTensor of size 2]]
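
Rather than dumping every tensor, a compact way to inspect the parameters (a sketch, assuming model_ft is the fine-tuned ResNet defined above) is to print only names and shapes:

In [ ]:
# print parameter names and shapes instead of full tensor contents
for name, p in model_ft.named_parameters():
    print(name, tuple(p.size()))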

In [32]:
criterion = nn.CrossEntropyLoss()

# Observe that all parameters are being optimized
optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9)

# Decay LR by a factor of 0.1 every 7 epochs
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)
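
To see what StepLR actually does — multiply the learning rate by gamma every step_size scheduler steps — here is a minimal, self-contained sketch (the dummy parameter exists only so the optimizer has something to hold):

In [ ]:
import torch
import torch.optim as optim
from torch.optim import lr_scheduler

w = torch.nn.Parameter(torch.zeros(1))        # dummy parameter
opt = optim.SGD([w], lr=0.001, momentum=0.9)
sched = lr_scheduler.StepLR(opt, step_size=7, gamma=0.1)

for epoch in range(21):
    opt.step()    # in PyTorch >= 1.1 the optimizer must step before the scheduler
    sched.step()
    print(epoch, opt.param_groups[0]['lr'])
# the printed lr drops by a factor of 10 after every 7 scheduler steps:
# 0.001 -> 0.0001 -> 0.00001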

In [ ]:
%%time
model_ft = train_model(model_ft, criterion, optimizer_ft, exp_lr_scheduler,
                       num_epochs=25)


Epoch 0/24
----------
train Loss: 0.4143 Acc: 0.8115
val Loss: 0.3071 Acc: 0.8824
Epoch 1/24
----------
train Loss: 0.4749 Acc: 0.8238
val Loss: 0.2358 Acc: 0.9216
Epoch 2/24
----------
train Loss: 0.4017 Acc: 0.8238
val Loss: 0.4650 Acc: 0.8562
Epoch 3/24
----------
train Loss: 0.4429 Acc: 0.8279
val Loss: 0.2935 Acc: 0.9150
Epoch 4/24
----------
train Loss: 0.5350 Acc: 0.8033
val Loss: 0.3385 Acc: 0.9085
Epoch 5/24
----------
train Loss: 0.4534 Acc: 0.8320
val Loss: 0.3395 Acc: 0.8758
Epoch 6/24
----------
train Loss: 0.2927 Acc: 0.8730
val Loss: 0.2516 Acc: 0.9346
Epoch 7/24
----------
train Loss: 0.3242 Acc: 0.8648
val Loss: 0.2323 Acc: 0.9346
Epoch 8/24
----------
train Loss: 0.3141 Acc: 0.8730
val Loss: 0.2481 Acc: 0.9281
Epoch 9/24
----------
train Loss: 0.2950 Acc: 0.8811
val Loss: 0.2324 Acc: 0.9281
Epoch 10/24
----------
train Loss: 0.2746 Acc: 0.8689
val Loss: 0.2322 Acc: 0.9412
Epoch 11/24
----------

In [ ]:
# TODO: use tensorboardX to visualize the loss going down during training
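
A minimal sketch of that TODO, assuming tensorboardX is installed (pip install tensorboardX); the log directory and tag names are illustrative, and the dummy values stand in for the epoch_loss / epoch_acc computed inside train_model:

In [ ]:
from tensorboardX import SummaryWriter

writer = SummaryWriter('runs/transfer_learning')  # illustrative log dir

# inside train_model's epoch loop you would log the real epoch_loss / epoch_acc;
# dummy values are used here so the cell runs standalone
for epoch, (loss, acc) in enumerate([(0.41, 0.81), (0.31, 0.88)]):
    writer.add_scalar('train/loss', loss, epoch)
    writer.add_scalar('train/acc', acc, epoch)

writer.close()
# inspect with: tensorboard --logdir runs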

In [ ]:
visualize_model(model_ft)

ConvNet as fixed feature extractor

  • Freeze every layer of the network except the last one
  • Set requires_grad = False on those parameters so that no gradients are computed for them during backward() — this is what freezes them
  • See the Autograd documentation for details

In [ ]:
model_conv = torchvision.models.resnet18(pretrained=True)
for param in model_conv.parameters():
    param.requires_grad = False

# Parameters of newly constructed modules have requires_grad=True by default
num_ftrs = model_conv.fc.in_features
model_conv.fc = nn.Linear(num_ftrs, 2)

if use_gpu:
    model_conv = model_conv.cuda()

criterion = nn.CrossEntropyLoss()

# Observe that only the parameters of the final layer are being optimized,
# as opposed to before.
optimizer_conv = optim.SGD(model_conv.fc.parameters(), lr=0.001, momentum=0.9)

# Decay LR by a factor of 0.1 every 7 epochs
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_conv, step_size=7, gamma=0.1)
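
As a quick sanity check (a sketch, not part of the original notebook), list which parameters remain trainable; only the freshly constructed fc layer should appear:

In [ ]:
# only parameters with requires_grad=True will receive gradient updates
trainable = [name for name, p in model_conv.named_parameters() if p.requires_grad]
print(trainable)  # expected: ['fc.weight', 'fc.bias']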

In [ ]:
model_conv = train_model(model_conv, criterion, optimizer_conv,
                         exp_lr_scheduler, num_epochs=25)

In [ ]:
visualize_model(model_conv)

plt.ioff()
plt.show()