-
Notifications
You must be signed in to change notification settings - Fork 72
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
8 changed files
with
396 additions
and
44 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
# Byte-compiled / optimized / DLL files | ||
__pycache__/ | ||
*.py[cod] | ||
*$py.class | ||
|
||
# C extensions | ||
*.so | ||
|
||
# Distribution / packaging | ||
.Python | ||
env/ | ||
build/ | ||
develop-eggs/ | ||
dist/ | ||
downloads/ | ||
eggs/ | ||
.eggs/ | ||
lib/ | ||
lib64/ | ||
parts/ | ||
sdist/ | ||
var/ | ||
*.egg-info/ | ||
.installed.cfg | ||
*.egg | ||
|
||
# PyInstaller | ||
# Usually these files are written by a python script from a template | ||
# before PyInstaller builds the exe, so as to inject date/other infos into it. | ||
*.manifest | ||
*.spec | ||
|
||
# Installer logs | ||
pip-log.txt | ||
pip-delete-this-directory.txt | ||
|
||
# Unit test / coverage reports | ||
htmlcov/ | ||
.tox/ | ||
.coverage | ||
.coverage.* | ||
.cache | ||
nosetests.xml | ||
coverage.xml | ||
*,cover | ||
.hypothesis/ | ||
|
||
# Translations | ||
*.mo | ||
*.pot | ||
|
||
# Django stuff: | ||
*.log | ||
local_settings.py | ||
|
||
# Flask stuff: | ||
instance/ | ||
.webassets-cache | ||
|
||
# Scrapy stuff: | ||
.scrapy | ||
|
||
# Sphinx documentation | ||
docs/_build/ | ||
|
||
# PyBuilder | ||
target/ | ||
|
||
# IPython Notebook | ||
.ipynb_checkpoints | ||
|
||
# pyenv | ||
.python-version | ||
|
||
# celery beat schedule file | ||
celerybeat-schedule | ||
|
||
# dotenv | ||
.env | ||
|
||
# virtualenv | ||
venv/ | ||
ENV/ | ||
|
||
# Spyder project settings | ||
.spyderproject | ||
|
||
# Rope project settings | ||
.ropeproject |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,111 @@ | ||
import json | ||
import glob | ||
import numpy as np | ||
import os | ||
import h5py | ||
import random | ||
import threading | ||
import queue | ||
|
||
class BackgroundGenerator(threading.Thread):
    """Prefetch items from a generator on a daemon worker thread.

    At most one prefetched item is buffered (Queue of size 1); a ``None``
    sentinel placed after the source is exhausted signals StopIteration
    to the consuming thread.
    """

    def __init__(self, generator):
        threading.Thread.__init__(self)
        self.queue = queue.Queue(1)   # single-slot buffer: produce one item ahead
        self.generator = generator
        self.daemon = True            # don't block interpreter exit
        self.start()

    def run(self):
        # Producer side: drain the wrapped generator, then push the sentinel.
        for produced in self.generator:
            self.queue.put(produced)
        self.queue.put(None)

    def __iter__(self):
        return self

    def __next__(self):
        # Consumer side: block until the worker hands over the next item.
        item = self.queue.get()
        if item is not None:
            return item
        raise StopIteration
|
||
def get_good_files(hdf5_path, train=True):
    """Return the set of usable HDF5 shard paths under *hdf5_path*.

    For training, file ids come from good_files.json (minus id 1, which is
    held out); for evaluation only id 1 is used.
    """
    if train:
        # https://gist.github.com/crizCraig/65677883e07c74bdc08f987e806cd95f
        with open(hdf5_path + "/good_files.json", "rb") as f:
            ids = json.loads(f.read().decode('utf8'))
        ids.remove(1)  # shard 1 is reserved for evaluation
    else:
        ids = [1]

    return {os.path.join(hdf5_path, "train_%04d.h5" % shard_id) for shard_id in ids}
|
||
def load_file(h5_filename):
    """Load one HDF5 shard and return ``(images, targets)`` lists.

    Each image is converted CHW -> HWC, channel-reversed BGR -> RGB, cast to
    float32, and mean-subtracted.

    NOTE(review): the mean [104, 117, 123] is the classic BGR channel mean,
    but it is subtracted *after* the BGR->RGB flip — confirm this ordering is
    intended.
    """
    mean_pixel = np.array([104., 117., 123.], np.float32)
    out_images = []
    out_targets = []

    with h5py.File(h5_filename, 'r') as hf:
        raw_images = list(hf.get('images'))
        raw_targets = list(hf.get('targets'))
        # (The original built an identity permutation here; iteration order
        # is unchanged, so plain zip is equivalent.)
        for raw, target in zip(raw_images, raw_targets):
            img = raw.transpose((1, 2, 0))          # CHW => HWC
            img = img[:, :, ::-1].astype(np.float32)  # BGR => RGB
            img -= mean_pixel
            out_images.append(img)
            out_targets.append(target)
    return out_images, out_targets
|
||
def file_loader(file_stream):
    """Generator: load each HDF5 file named by *file_stream*, logging progress."""
    for h5_name in file_stream:
        print('input file: {}'.format(h5_name))
        yield load_file(h5_name)
|
||
def batch_gen(file_stream, batch_size):
    """Yield ``(image_batch, target_batch)`` slices of length *batch_size*.

    Files are loaded on a background thread while the previous file is being
    consumed. Any trailing remainder smaller than *batch_size* within a file
    is dropped.
    """
    loader = BackgroundGenerator(file_loader(file_stream))
    for images, targets in loader:
        usable = (len(images) // batch_size) * batch_size
        for start in range(0, usable, batch_size):
            stop = start + batch_size
            yield images[start:stop], targets[start:stop]
|
||
class Dataset(object):
    """A collection of HDF5 shard files that can be iterated in mini-batches."""

    def __init__(self, files):
        # List of .h5 paths; iterate_forever reshuffles it in place each epoch.
        self._files = files

    def iterate_once(self, batch_size):
        """Yield batches from a single ordered pass over the files."""
        yield from batch_gen(iter(self._files), batch_size)

    def iterate_forever(self, batch_size):
        """Yield batches endlessly, reshuffling the file order every epoch."""
        def endless_names():
            while True:
                random.shuffle(self._files)
                yield from self._files
        yield from batch_gen(endless_names(), batch_size)
|
||
|
||
def get_dataset(hdf5_path, train=True):
    """Build a :class:`Dataset` from the .h5 shards under *hdf5_path*,
    keeping only those listed as good for the requested split."""
    good_files = get_good_files(hdf5_path, train=train)
    candidates = glob.glob(hdf5_path + "/*.h5")
    return Dataset([name for name in candidates if name in good_files])
|
||
def run():
    """Script entry point: build a dataset from $DEEPDRIVE_HDF5_PATH and print it.

    Raises:
        KeyError: if DEEPDRIVE_HDF5_PATH is not set in the environment.
    """
    # Bug fix: os.environ is a mapping, not a callable — the original
    # os.environ('DEEPDRIVE_HDF5_PATH') raised TypeError. Index it instead,
    # so a missing variable fails with a clear KeyError naming the variable.
    hdf5_path = os.environ['DEEPDRIVE_HDF5_PATH']

    # print(get_good_files(hdf5_path))
    dataset = get_dataset(hdf5_path)
    print(dataset)
|
||
# Allow running this module directly as a script.
if __name__ == "__main__":
    run()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
import numpy as np | ||
import tensorflow as tf | ||
|
||
def conv(input, kernel, biases, k_h, k_w, c_o, s_h, s_w, padding="VALID", group=1):
    '''Grouped 2-D convolution with bias (from https://github.com/ethereon/caffe-tensorflow).

    When group > 1, the input and kernel are split along the channel axis,
    convolved per group, and the results concatenated (AlexNet-style
    grouped convolution).

    NOTE(review): the (axis-first) argument order of tf.split(3, ...) and
    tf.concat(3, ...) is the legacy TF <= 0.12 API — confirm the pinned
    TensorFlow version before upgrading.

    Args:
        input: 4-D NHWC tensor.
        kernel: convolution filter variable.
        biases: per-output-channel bias variable.
        k_h, k_w: kernel height/width (unused here; kernel shape governs).
        c_o: number of output channels.
        s_h, s_w: vertical/horizontal strides.
        padding: tf.nn.conv2d padding mode.
        group: number of channel groups; must divide both c_i and c_o.
    '''
    c_i = input.get_shape()[-1]
    assert c_i % group == 0
    assert c_o % group == 0
    def convolve(i, k):
        return tf.nn.conv2d(i, k, [1, s_h, s_w, 1], padding=padding)

    # TODO: random weight initialization
    # W = tf.get_variable("W", shape=[784, 256],
    #     initializer=tf.contrib.layers.xavier_initializer())

    if group == 1:
        conv = convolve(input, kernel)
    else:
        # Split input and kernel along the channel axis, convolve per group.
        input_groups = tf.split(3, group, input)
        kernel_groups = tf.split(3, group, kernel)
        output_groups = [convolve(i, k) for i, k in zip(input_groups, kernel_groups)]
        conv = tf.concat(3, output_groups)
    # Add bias, then reshape to re-attach a static spatial/channel shape.
    return tf.reshape(tf.nn.bias_add(conv, biases), [-1] + conv.get_shape().as_list()[1:])
|
||
def conv2d(x, name, num_features, kernel_size, stride, group):
    """Square grouped convolution layer with SAME padding.

    Creates (or reuses) variables ``name_W`` and ``name_b`` and delegates
    to :func:`conv`.
    """
    in_channels = x.get_shape()[3]
    weight_shape = [kernel_size, kernel_size, in_channels // group, num_features]
    weights = tf.get_variable(name + "_W", weight_shape)
    bias = tf.get_variable(name + "_b", [num_features])
    return conv(x, weights, bias, kernel_size, kernel_size, num_features,
                stride, stride, padding="SAME", group=group)
|
||
def linear(x, name, size):
    """Fully connected layer: flatten *x* and return ``x @ W + b``.

    W is drawn from N(0, 0.005); b starts at zero. Variables are named
    ``name_W`` and ``name_b``.
    """
    flat_dim = np.prod([int(d) for d in x.get_shape()[1:]])
    x = tf.reshape(x, [-1, flat_dim])
    w = tf.get_variable(name + "_W", [flat_dim, size],
                        initializer=tf.random_normal_initializer(0.0, 0.005))
    b = tf.get_variable(name + "_b", [size], initializer=tf.zeros_initializer)
    return tf.matmul(x, w) + b
|
||
def max_pool_2x2(x):
    """Overlapping max pool: 3x3 window, stride 2, VALID padding (AlexNet-style)."""
    window = [1, 3, 3, 1]
    stride = [1, 2, 2, 1]
    return tf.nn.max_pool(x, ksize=window, strides=stride, padding='VALID')
|
||
def lrn(x):
    """AlexNet-style local response normalization over adjacent channels."""
    return tf.nn.local_response_normalization(
        x, depth_radius=2, alpha=2e-05, beta=0.75, bias=1.0)
Oops, something went wrong.