27 changes: 13 additions & 14 deletions .travis.yml
@@ -21,8 +21,8 @@ install:
- export TENSORFLOW_INSTALL="$(python setup.py --package-version)"

stages:
- lint
- build
- lint
- release

jobs:
@@ -38,30 +38,28 @@ jobs:
- stage: build
name: "Install Build on Ubuntu 16.04"
script:
- export TENSORFLOW_INSTALL="tf-nightly"
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host buildpack-deps:14.04 bash -x -e .travis/python.release.sh "${TENSORFLOW_INSTALL}" python python3.5
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host -e GITHUB_PAT=9eecea9200150af1ec29f70bb067575eb2e56fc7 buildpack-deps:16.04 bash -x -e .travis/wheel.test.sh
- stage: build
name: "Install Build on Ubuntu 18.04"
script:
- export TENSORFLOW_INSTALL="tf-nightly"
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host buildpack-deps:14.04 bash -x -e .travis/python.release.sh "${TENSORFLOW_INSTALL}" python python3.6
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host -e GITHUB_PAT=9eecea9200150af1ec29f70bb067575eb2e56fc7 buildpack-deps:18.04 bash -x -e .travis/wheel.test.sh
# Developer Builds make sure the source code of the repo can be
# built and run in a commodity developer environment (Ubuntu 16.04/18.04).
- stage: build
name: "Developer Build on Ubuntu 16.04"
before_script: &developer_build
- |
echo "bash -x -e .travis/bazel.configure.sh \"${TENSORFLOW_INSTALL}\"" > script.sh
echo "bash -x -e .travis/bazel.build.sh" >> script.sh
echo "bash -x -e .travis/build.test.sh \"${TENSORFLOW_INSTALL}\"" >> script.sh
- cat script.sh
name: "Install Build on Ubuntu 16.04 with tf-nightly-2.0-preview"
script:
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host buildpack-deps:16.04 bash -x -e script.sh
- export TENSORFLOW_INSTALL="tf-nightly-2.0-preview"
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host buildpack-deps:14.04 bash -x -e .travis/python.release.sh "${TENSORFLOW_INSTALL}" python python3.5
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host -e GITHUB_PAT=9eecea9200150af1ec29f70bb067575eb2e56fc7 buildpack-deps:16.04 bash -x -e .travis/wheel.test.sh
- stage: build
name: "Developer Build on Ubuntu 18.04"
before_script: *developer_build
name: "Install Build on Ubuntu 18.04 with tf-nightly-2.0-preview"
script:
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host buildpack-deps:18.04 bash -x -e script.sh
- export TENSORFLOW_INSTALL="tf-nightly-2.0-preview"
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host buildpack-deps:14.04 bash -x -e .travis/python.release.sh "${TENSORFLOW_INSTALL}" python python3.6
- docker run -i -t --rm -v $PWD:/v -v $PWD/.cache/pip/:/root/.cache/pip -w /v --net=host -e GITHUB_PAT=9eecea9200150af1ec29f70bb067575eb2e56fc7 buildpack-deps:18.04 bash -x -e .travis/wheel.test.sh

# Preview Release Builds are for TensorFlow 2.0 Preview release.
# Note only Linux (Ubuntu 18.04) and macOS are supported.
- stage: release
Expand All @@ -86,6 +84,7 @@ jobs:
if [[ ( ${TRAVIS_BRANCH} == "master" ) && ( ${TRAVIS_EVENT_TYPE} != "pull_request" ) ]]; then
twine upload wheelhouse/*.whl
fi

# Release Builds are for nightly release.
# Note Python 2.7, 3.4, 3.5, 3.6 are supported on Linux
# for Ubuntu 14.04/16.04/18.04, and Python 2.7 for macOS.
7 changes: 7 additions & 0 deletions config_helper.py
@@ -75,6 +75,13 @@ def write_config():

bazel_rc.write('build --action_env TF_SHARED_LIBRARY_DIR="{}"\n'
.format(libdir_list[0][2:]))
library_name = library_list[0][2:]
if library_name.startswith(":"):
library_name = library_name[1:]
else:
library_name = "lib" + library_name + ".so"
bazel_rc.write('build --action_env TF_SHARED_LIBRARY_NAME="{}"\n'
.format(library_name))
bazel_rc.close()
except OSError:
print("ERROR: Writing .bazelrc")
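For reference, the new `TF_SHARED_LIBRARY_NAME` logic derives the shared-library file name from the first `-l` entry in the link flags reported by TensorFlow. A minimal sketch of that derivation, using hypothetical flag values purely for illustration:

```python
def shared_library_name(linker_flag):
    """Derive the shared library file name from a "-l..." linker flag.

    Mirrors the logic added to write_config(): "-l:libfoo.so.1" already
    names the file verbatim, while "-lfoo" expands to "libfoo.so".
    """
    name = linker_flag[2:]           # drop the leading "-l"
    if name.startswith(":"):
        return name[1:]              # "-l:libfoo.so.1" -> "libfoo.so.1"
    return "lib" + name + ".so"      # "-lfoo"          -> "libfoo.so"


# Hypothetical flag values, not read from an actual TensorFlow install:
print(shared_library_name("-l:libtensorflow_framework.so.1"))  # libtensorflow_framework.so.1
print(shared_library_name("-ltensorflow_framework"))           # libtensorflow_framework.so
```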
128 changes: 48 additions & 80 deletions tensorflow_io/mnist/python/ops/mnist_dataset_ops.py
@@ -18,112 +18,80 @@
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow.compat.v2 import data
from tensorflow_io import _load_library
mnist_ops = _load_library('_mnist_ops.so')

class _MNISTBaseDataset(data.Dataset):
"""A MNIST Dataset
"""

def __init__(self, mnist_op_class):
"""Create a MNISTReader.
class InputDataset(data.Dataset):
"""An InputDataset"""

Args:
mnist_op_class: The op of the dataset, either
mnist_ops.mnist_image_dataset or mnist_ops.mnist_label_dataset.
filenames: A `tf.string` tensor containing one or more filenames.
"""
self._func = mnist_op_class
super(_MNISTBaseDataset, self).__init__()
def __init__(self, fn, data_input, output_types, output_shapes):
"""Create an InputDataset."""
self._data_input = data_input
self._output_types = output_types
self._output_shapes = output_shapes
super(InputDataset, self).__init__(fn(
self._data_input,
output_types=self._output_types,
output_shapes=self._output_shapes))

def _inputs(self):
return []

def _as_variant_tensor(self):
return self._func(
self._data_input,
output_types=self.output_types,
output_shapes=self.output_shapes)

@property
def output_classes(self):
return tensorflow.Tensor
def _element_structure(self):
e = [
tensorflow.data.experimental.TensorStructure(
p, q.as_list()) for (p, q) in zip(
self.output_types, self.output_shapes)
]
if len(e) == 1:
return e[0]
return tensorflow.data.experimental.NestedStructure(e)

@property
def output_types(self):
return tuple([dtypes.uint8])

class MNISTImageDataset(_MNISTBaseDataset):
"""A MNIST Image Dataset
"""

def __init__(self, filename):
"""Create a MNISTReader.

Args:
filenames: A `tf.string` tensor containing one or more filenames.
"""
self._data_input = mnist_ops.mnist_image_input(filename, ["none", "gz"])
super(MNISTImageDataset, self).__init__(
mnist_ops.mnist_image_dataset)
return self._output_types

@property
def output_shapes(self):
return tuple([tensorflow.TensorShape([None, None])])
return self._output_shapes


class MNISTLabelDataset(_MNISTBaseDataset):
"""A MNIST Label Dataset
class MNISTLabelDataset(InputDataset):
"""A MNISTLabelDataset
"""

def __init__(self, filename):
"""Create a MNISTReader.
"""Create a MNISTLabelDataset.

Args:
filenames: A `tf.string` tensor containing one or more filenames.
"""
self._data_input = mnist_ops.mnist_label_input(filename, ["none", "gz"])
super(MNISTLabelDataset, self).__init__(
mnist_ops.mnist_label_dataset)

@property
def output_shapes(self):
return tuple([tensorflow.TensorShape([])])

class MNISTDataset(data.Dataset):
"""A MNIST Dataset
mnist_ops.mnist_label_dataset,
mnist_ops.mnist_label_input(filename, ["none", "gz"]),
[tensorflow.uint8],
[tensorflow.TensorShape([])]
)

class MNISTImageDataset(InputDataset):
"""A MNISTImageDataset
"""

def __init__(self, image, label):
"""Create a MNISTReader.
def __init__(self, filename):
"""Create a MNISTImageDataset.

Args:
image: A `tf.string` tensor containing image filename.
label: A `tf.string` tensor containing label filename.
filenames: A `tf.string` tensor containing one or more filenames.
"""
self._image = image
self._label = label
super(MNISTDataset, self).__init__()

def _inputs(self):
return []

def _as_variant_tensor(self):
return data.Dataset.zip( # pylint: disable=protected-access
(MNISTImageDataset(self._image),
MNISTLabelDataset(self._label))
)._as_variant_tensor()

@property
def output_shapes(self):
return (tensorflow.TensorShape([None, None]), tensorflow.TensorShape([]))

@property
def output_classes(self):
return tensorflow.Tensor, tensorflow.Tensor

@property
def output_types(self):
return dtypes.uint8, dtypes.uint8
super(MNISTImageDataset, self).__init__(
mnist_ops.mnist_image_dataset,
mnist_ops.mnist_image_input(filename, ["none", "gz"]),
[tensorflow.uint8],
[tensorflow.TensorShape([None, None])]
)

def MNISTDataset(image_filename, label_filename):
return data.Dataset.zip((
MNISTImageDataset(image_filename),
MNISTLabelDataset(label_filename)))
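After this refactor, `MNISTDataset` is a plain function that zips `MNISTImageDataset` and `MNISTLabelDataset`, so the result iterates eagerly like any other `tf.data` pipeline. A minimal usage sketch under that assumption (the file paths are placeholders, and eager execution is assumed to be enabled as in the updated tests):

```python
from tensorflow_io import mnist as mnist_io

# Placeholder paths to IDX files (optionally gzip-compressed).
image_filename = "t10k-images-idx3-ubyte.gz"
label_filename = "t10k-labels-idx1-ubyte.gz"

dataset = mnist_io.MNISTDataset(image_filename, label_filename)
for image, label in dataset.take(1):
    # Each element is a (uint8 image, uint8 scalar label) pair from Dataset.zip.
    print(image.shape, int(label))
```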
102 changes: 43 additions & 59 deletions tests/test_mnist.py
@@ -22,74 +22,58 @@
import numpy as np

import tensorflow
tensorflow.compat.v1.disable_eager_execution()
tensorflow.compat.v1.enable_eager_execution()

from tensorflow import errors # pylint: disable=wrong-import-position
from tensorflow import test # pylint: disable=wrong-import-position
from tensorflow.compat.v1 import data # pylint: disable=wrong-import-position
from tensorflow_io import mnist as mnist_io # pylint: disable=wrong-import-position

from tensorflow_io.mnist.python.ops import mnist_dataset_ops # pylint: disable=wrong-import-position

def test_mnist_dataset():
"""Test case for MNIST Dataset.
"""
mnist_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_mnist",
"mnist.npz")
with np.load(mnist_filename) as f:
(x_test, y_test) = f['x_test'], f['y_test']

class MNISTDatasetTest(test.TestCase):
"""MNISTDatasetTest"""
def test_mnist_dataset(self):
"""Test case for MNIST Dataset.
"""
mnist_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_mnist",
"mnist.npz")
with np.load(mnist_filename) as f:
(x_test, y_test) = f['x_test'], f['y_test']
image_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_mnist",
"t10k-images-idx3-ubyte.gz")
label_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_mnist",
"t10k-labels-idx1-ubyte.gz")

image_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_mnist",
"t10k-images-idx3-ubyte.gz")
label_filename = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_mnist",
"t10k-labels-idx1-ubyte.gz")
image_dataset = mnist_io.MNISTImageDataset(image_filename)
label_dataset = mnist_io.MNISTLabelDataset(label_filename)

image_dataset = mnist_dataset_ops.MNISTImageDataset(image_filename)
label_dataset = mnist_dataset_ops.MNISTLabelDataset(label_filename)
i = 0
for m_x in image_dataset:
v_x = x_test[i]
assert np.alltrue(v_x == m_x.numpy())
i += 1
assert i == len(y_test)

dataset = mnist_dataset_ops.MNISTDataset(
image_filename, label_filename)
i = 0
for m_y in label_dataset:
v_y = y_test[i]
assert np.alltrue(v_y == m_y.numpy())
i += 1
assert i == len(y_test)

iterator = data.Dataset.zip(
(image_dataset, label_dataset)).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
dataset = mnist_io.MNISTDataset(
image_filename, label_filename)

with self.cached_session() as sess:
sess.run(init_op)
l = len(y_test)
for i in range(l):
v_x = x_test[i]
v_y = y_test[i]
m_x, m_y = sess.run(get_next)
self.assertEqual(v_y, m_y)
self.assertAllEqual(v_x, m_x)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)

iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()

with self.cached_session() as sess:
sess.run(init_op)
l = len(y_test)
for i in range(l):
v_x = x_test[i]
v_y = y_test[i]
m_x, m_y = sess.run(get_next)
self.assertEqual(v_y, m_y)
self.assertAllEqual(v_x, m_x)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
i = 0
for (m_x, m_y) in dataset:
v_x = x_test[i]
v_y = y_test[i]
assert np.alltrue(v_y == m_y.numpy())
assert np.alltrue(v_x == m_x.numpy())
i += 1
assert i == len(y_test)

if __name__ == "__main__":
test.main()
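As a side note, the element-by-element comparison against the `mnist.npz` reference is repeated for images, labels, and the zipped dataset; the zipped case could be factored into a small helper, for example (a sketch only, not part of the change):

```python
import numpy as np

def assert_matches_reference(dataset, x_ref, y_ref):
    """Compare an eager (image, label) dataset element-wise with numpy reference arrays."""
    count = 0
    for image, label in dataset:
        assert np.array_equal(x_ref[count], image.numpy())
        assert np.array_equal(y_ref[count], label.numpy())
        count += 1
    assert count == len(y_ref)  # dataset and reference must have the same length
```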
7 changes: 0 additions & 7 deletions tests/test_mnist_eager.py
@@ -19,14 +19,10 @@
from __future__ import print_function

import os
import pytest

import tensorflow as tf
import tensorflow_io.mnist as mnist_io

@pytest.mark.skipif(
not (hasattr(tf, "version") and
tf.version.VERSION.startswith("2.0.")), reason=None)
def test_mnist_tutorial():
"""test_mnist_tutorial"""
image_filename = os.path.join(
@@ -55,9 +51,6 @@ def test_mnist_tutorial():

model.fit(d_train, epochs=5)

@pytest.mark.skipif(
not (hasattr(tf, "version") and
tf.version.VERSION.startswith("2.0.")), reason=None)
def test_mnist_tutorial_uncompressed():
"""test_mnist_tutorial_uncompressed"""
image_filename = os.path.join(