In [41]:
import random
import numpy as np
from work.config import STORED_FEATURES_PATH
from work.dataset.activitynet import ActivityNetDataset

# Configuration
length = 16           # frames per clip/instance
sequence_length = 20  # NOTE(review): unused in this cell — presumably consumed by later cells; confirm
batch_size = 256      # number of stacks the videos are balanced into

random.seed(42)  # fix the shuffle below so the stack assignment is reproducible

# Loading dataset
print('Loading dataset')
dataset = ActivityNetDataset(
    videos_path='../dataset/videos.json',
    labels_path='../dataset/labels.txt'
)

# Total number of fixed-length clips available across all videos.
nb_instances = sum(video.num_frames // length for video in dataset.videos)
print('Number of instances: %d' % nb_instances)

# Greedy load balancing: always drop the next video onto the currently
# lightest stack, so all `batch_size` stacks end up with a similar clip count.
stacks = [[] for _ in range(batch_size)]
stacks_size = np.zeros(batch_size)
videos = dataset.videos
random.shuffle(videos)
for video in videos:
    pos = np.argmin(stacks_size)
    stacks[pos].append(video)
    # Was hard-coded `16`; use the `length` constant so it stays consistent
    # with the nb_instances computation above.
    stacks_size[pos] += video.num_frames // length
In [42]:
# Report the lightest stack after balancing: its index and its clip count.
lightest = np.argmin(stacks_size)
print(lightest)
print(stacks_size[lightest])
In [43]:
# Report the heaviest stack after balancing: its index and its clip count.
heaviest = np.argmax(stacks_size)
print(heaviest)
print(stacks_size[heaviest])
In [18]:
import numpy as np

# `max_seq` previously leaked from a deleted/hidden cell and raised NameError
# on a fresh kernel; derive it from the config constant instead.
max_seq = sequence_length  # TODO(review): confirm sequence_length (20) is the intended max sequence length
# Pre-allocate the feature tensor: (stack, timestep, 4096-dim feature vector).
data = np.zeros((batch_size, max_seq, 4096))
In [20]:
# Sanity check: rich-display the allocated tensor's dimensions.
data.shape
Out[20]:
In [27]:
# How to reshape all the data loaded:
# split each length-4 row into two pairs, then move the pair axis to the front.
a = np.array([[1, 2, 3, 4],
              [2, 5, 7, 8],
              [9, 8, 7, 6]])
print(a)
print(a.shape)
b = a.reshape((3, 2, 2)).transpose(1, 0, 2)
print(b)
print(b.shape)