Commit b14ebf1f authored by Dawit Hailu

updated work

parent 126e0b19
%% Cell type:code id: tags:
``` python
import dival.datasets.lodopab_dataset as lodopab
import matplotlib.pyplot as plt

# LoDoPaB-CT: each sample is an (observation, ground_truth) pair,
# where the observation is a low-dose sinogram and the ground truth
# the reconstructed CT image
dataset = lodopab.LoDoPaBDataset(impl='skimage')
sample_observ, sample_ground_truth = dataset.get_sample(1231)
plt.subplot(1, 2, 1)
plt.imshow(sample_observ)
plt.subplot(1, 2, 2)
plt.imshow(sample_ground_truth)
```
%% Output
<matplotlib.image.AxesImage at 0x7fdb6ec17610>
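%% Cell type:markdown id: tags:
A quick shape check (a sketch added here, not part of the original commit), assuming the cell above has run: `np.asarray` turns the ODL sample elements into plain NumPy arrays, which should confirm the 1000×513 observation and 362×362 ground truth sizes assumed by the network below.
%% Cell type:code id: tags:
``` python
import numpy as np
# np.asarray works because ODL/dival elements expose a NumPy interface
print(np.asarray(sample_observ).shape)        # expected (1000, 513)
print(np.asarray(sample_ground_truth).shape)  # expected (362, 362)
```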
%% Cell type:code id: tags:
``` python
import torch
import torch.nn as nn
import torch.nn.functional as F

'''
input:  1000*513 flattened sinogram
output: 362*362 flattened reconstruction
'''
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        # fully connected: flattened sinogram -> 10 hidden units -> flattened image
        self.inputlayer = nn.Linear(1000*513, 10, bias=True)
        self.layer2 = nn.Linear(10, 362*362, bias=True)

    def forward(self, inp):  # inp is a flattened vector of size 1000*513
        x = self.inputlayer(inp)
        x = F.relu(x)
        x = self.layer2(x)
        return x

mynet = Net()
print(mynet)
```
%% Output
Net(
  (inputlayer): Linear(in_features=513000, out_features=10, bias=True)
  (layer2): Linear(in_features=10, out_features=131044, bias=True)
)
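%% Cell type:markdown id: tags:
A rough model-size check (a sketch, not part of the original commit): the first Linear layer alone carries 513000 × 10 + 10 = 5 130 010 parameters, so counting all trainable parameters is a useful sanity check before attempting to train this fully connected network.
%% Cell type:code id: tags:
``` python
# total number of trainable parameters in mynet
n_params = sum(p.numel() for p in mynet.parameters())
print(n_params)  # 513000*10 + 10 + 10*131044 + 131044 = 6 571 494
```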
%% Cell type:code id: tags:
``` python
'''data=dataset.get_sample(1230)
print(torch.is_tensor(data))
data2=torch.as_tensor(data)
print(torch.is_tensor(data2))
'''
data = torch.rand(1000*513)  # sanity check: a random flattened "sinogram"
print(data)
print(mynet(data).size())  # 131044 = 362*362
print(dataset.get_sample(1)[0].type())  # fails: a DiscreteLpElement is not a torch tensor
```
%% Output
tensor([0.7736, 0.7270, 0.2761, ..., 0.7862, 0.7033, 0.8556])
torch.Size([131044])
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-19-0d3c77efb4fa> in <module>
7 print(data)
8 print(mynet(data).size()) #131 044 = 362*362
----> 9 print(dataset.get_sample(1)[0].type())
AttributeError: 'DiscreteLpElement' object has no attribute 'type'
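%% Cell type:markdown id: tags:
One way around this (a sketch added here, not part of the original commit, assuming the dival samples are ODL `DiscreteLpElement` objects as the traceback suggests): go through NumPy first, then build the tensor.
%% Cell type:code id: tags:
``` python
import numpy as np
obs, gt = dataset.get_sample(1)
# np.asarray works on ODL elements; torch.as_tensor then yields a real tensor
obs_t = torch.as_tensor(np.asarray(obs))
print(obs_t.type())   # e.g. torch.FloatTensor
print(obs_t.shape)    # expected torch.Size([1000, 513])
```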
%% Cell type:code id: tags:
``` python
from torchvision import transforms

torch.from_numpy(dataset.get_sample(1230)[0])  # fails: expects a plain np.ndarray, not a DiscreteLpElement
print(data)
```
%% Output
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-23-5ef64af544ca> in <module>
1 from torchvision import transforms
2
----> 3 torch.from_numpy(dataset.get_sample(1230)[0])
4 print(data)
TypeError: expected np.ndarray (got DiscreteLpElement)
%% Cell type:code id: tags:
``` python
data = dataset.get_sample(1231)
print(torch.as_tensor(data[0][0]).size())  # a single row of the 1000x513 sinogram
```
%% Output
torch.Size([513])
%% Cell type:code id: tags:
``` python
import numpy as np
Transformer = transforms.Compose([
    transforms.ToTensor()
])
#Transformer(data[1][0])
torch.from_numpy(data[1])  # fails again: from_numpy needs an np.ndarray
```
%% Output
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-64-741f17b0fc5e> in <module>
4 ])
5 #Transformer(data[1][0])
----> 6 torch.from_numpy(data[1])
TypeError: expected np.ndarray (got DiscreteLpElement)
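%% Cell type:markdown id: tags:
As before, converting through NumPy first should work (a sketch, not part of the original commit, assuming `data` is the sample loaded above); flattening the ground truth then gives a target whose size matches the network's 131044-dimensional output.
%% Cell type:code id: tags:
``` python
# convert the ground truth element and flatten it to match mynet's output size
gt_t = torch.from_numpy(np.asarray(data[1])).reshape(362*362)
print(gt_t.size())  # expected torch.Size([131044])
```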
%% Cell type:code id: tags:
``` python
# get_samples returns plain NumPy arrays (with a leading batch dimension),
# so torch.from_numpy works directly here
x, y = dataset.get_samples(slice(0, 1, 1))
print(torch.from_numpy(x).reshape(513*1000))
print(torch.from_numpy(x))
```
%% Output
tensor([-7.1809e-05, -4.1940e-05, -2.4069e-04, ..., 8.4311e-05,
2.3020e-04, 1.7529e-04])
tensor([[[-7.1809e-05, -4.1940e-05, -2.4069e-04, ..., -1.0458e-04,
1.8018e-05, 1.4489e-04],
[-1.1648e-04, 6.9213e-05, -1.7874e-04, ..., -1.1945e-04,
-6.0001e-06, 1.7225e-04],
[-2.8182e-04, -3.2965e-05, -2.0238e-04, ..., -5.0908e-05,
-9.5652e-05, -3.8949e-05],
...,
[-3.5957e-05, 6.0031e-06, -2.3186e-04, ..., 4.4923e-04,
-2.3983e-05, 1.3578e-04],
[ 3.9072e-05, -2.8475e-04, -7.4792e-05, ..., -4.0148e-04,
-4.4930e-05, -4.4930e-05],
[ 3.9072e-05, 2.0577e-04, 4.1504e-04, ..., 8.4311e-05,
2.3020e-04, 1.7529e-04]]])
%% Cell type:code id: tags:
``` python
# forward pass of the untrained network on the first real sinogram
mynet(torch.from_numpy(x).reshape(513*1000))
```
%% Output
tensor([-5.9897e-02, 2.2034e-01, 1.2248e-01, ..., 2.7456e-05,
2.8770e-01, -1.5429e-01], grad_fn=<AddBackward0>)
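%% Cell type:markdown id: tags:
The 131044-dimensional output can be reshaped back into a 362×362 image and compared with the ground truth (a sketch, not part of the original commit, assuming `x` and `y` from the `get_samples` call above are still in scope). Since the network is untrained, the left panel is expected to look like noise.
%% Cell type:code id: tags:
``` python
out = mynet(torch.from_numpy(x).reshape(513*1000))
plt.subplot(1, 2, 1)
plt.imshow(out.detach().reshape(362, 362).numpy())  # untrained "reconstruction"
plt.subplot(1, 2, 2)
plt.imshow(y[0])  # ground truth image
```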
%% Cell type:code id: tags:
``` python
```