In [1]:
import os, glob, platform, datetime, random
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.utils.data as data_utils
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
from torch.autograd import Variable
import torch.nn.functional as F
# import torchvision.datasets as datasets
import torchvision.models as models
import torchvision.transforms as transforms
import cv2
from PIL import Image
from tensorboardX import SummaryWriter
import numpy as np
from numpy.linalg import inv as denseinv
from scipy import sparse
from scipy.sparse import lil_matrix, csr_matrix
from scipy.sparse.linalg import spsolve
from scipy.sparse.linalg import inv as spinv
import scipy.misc
In [27]:
# DenseNet-121 backbone with frozen weights (no gradients flow into it)
densenet = models.__dict__["densenet121"](pretrained=False)
for param in densenet.parameters():
    param.requires_grad = False
# GradientNet (defined in an earlier cell of this notebook) wraps the
# backbone; with go_through_merge=True it also returns the list m of
# intermediate per-scale merge outputs alongside the final output y
net = GradientNet(densenet=densenet, pretrained_scale=2, debug=False)
x = Variable(torch.zeros(1, 3, 32, 32))
y, m = net(x, go_through_merge=True)
Output (full tensor printouts elided; shapes retained): four progressively
halved per-scale merge Variables, a bare 0, then the final full-resolution
Variable.
[torch.FloatTensor of size 1x3x16x16]
[torch.FloatTensor of size 1x3x8x8]
[torch.FloatTensor of size 1x3x4x4]
[torch.FloatTensor of size 1x3x2x2]
0
[torch.FloatTensor of size 1x3x32x32]
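The GradientNet class itself is defined in a cell not shown in this excerpt. As a rough sketch of the calling convention seen above (a list of progressively halved per-scale Variables plus a full-resolution output), a hypothetical stand-in could look like the following. ToyMergeNet and all of its internals are invented for illustration, and the real m additionally carries a trailing int 0, as the next cell reveals.
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable

class ToyMergeNet(nn.Module):
    # Hypothetical stand-in, NOT the notebook's GradientNet: collects one
    # 3-channel map per scale while downsampling, then upsamples back to
    # the input resolution for the final prediction.
    def __init__(self, num_scales=4):
        super(ToyMergeNet, self).__init__()
        self.stages = nn.ModuleList(
            [nn.Conv2d(3, 3, kernel_size=3, stride=2, padding=1)
             for _ in range(num_scales)])
        self.head = nn.Conv2d(3, 3, kernel_size=3, padding=1)

    def forward(self, x, go_through_merge=False):
        merges = []
        h = x
        for stage in self.stages:
            h = stage(h)        # halves the spatial resolution
            merges.append(h)    # keep the per-scale output
        y = self.head(F.upsample(h, size=x.size()[2:]))
        if go_through_merge:
            return y, merges
        return y

toy = ToyMergeNet()
x = Variable(torch.zeros(1, 3, 32, 32))
y, m = toy(x, go_through_merge=True)
print([tuple(t.size()) for t in m])  # [(1,3,16,16), (1,3,8,8), (1,3,4,4), (1,3,2,2)]
print(tuple(y.size()))               # (1, 3, 32, 32)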
In [32]:
m[4].size()
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-32-82fcb491b3e4> in <module>()
----> 1 m[4].size()
AttributeError: 'int' object has no attribute 'size'
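The failure is expected: the last element of m is the plain int 0, not a Variable, so it has no .size(). A small defensive sketch (not from the original notebook) for inspecting the whole list:
for i, t in enumerate(m):
    if hasattr(t, 'size'):       # Variables/tensors expose .size()
        print(i, tuple(t.size()))
    else:
        print(i, repr(t))        # e.g. the trailing int 0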
In [ ]:
# halve every entry of the channel-count list
a = [64, 64, 128, 256, 1024]
a = [x // 2 for x in a]
print(a)  # [32, 32, 64, 128, 512]
In [ ]:
# pretrained DenseNet-121 moved to GPU 3
densenet = models.__dict__["densenet121"](pretrained=True).cuda(3)
# torch.Tensor(1,3,256,256) allocates uninitialized memory; zeros gives a
# deterministic dummy input for this smoke test
x = Variable(torch.zeros(1, 3, 256, 256)).cuda(3)
y = densenet(x)
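Since this cell only runs a forward pass, inference mode is worth noting. A minimal sketch using the pre-0.4 PyTorch idiom the rest of the notebook follows (volatile=True is the old analogue of torch.no_grad()):
densenet.eval()   # fix batch-norm statistics and disable dropout
x = Variable(torch.zeros(1, 3, 256, 256), volatile=True).cuda(3)
y = densenet(x)   # no autograd graph is built for this pass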
In [ ]:
y