From 30ca1ad07c10d6b69f6476f7a6be3e08c536fdc6 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Fri, 9 Aug 2019 15:27:05 -0500 Subject: [PATCH 01/20] Add files via upload --- .../optimizers/conditional_gradient.py | 125 ++++ .../optimizers/conditional_gradient_test.py | 588 ++++++++++++++++++ 2 files changed, 713 insertions(+) create mode 100644 tensorflow_addons/optimizers/conditional_gradient.py create mode 100644 tensorflow_addons/optimizers/conditional_gradient_test.py diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py new file mode 100644 index 0000000000..8f8721ff7c --- /dev/null +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -0,0 +1,125 @@ +# Copyright 2018 Vishnu sai rao suresh Lokhande & Pengyu Kan. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Conditional Gradient method for TensorFlow.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from tensorflow.python.framework import ops +from tensorflow.python.ops import math_ops +from tensorflow.python.training import optimizer +#from tensorflow.python.training import training_ops +from tensorflow.python.util.tf_export import tf_export +from tensorflow.python.ops import control_flow_ops +from tensorflow.python.ops import state_ops + +from tensorflow.python.ops import resource_variable_ops +#from tensorflow.python.ops import variable_scope +from tensorflow.python.ops import array_ops + + + +@tf_export(v1=["train.ConditionalGradientOptimizer"]) +class ConditionalGradientOptimizer(optimizer.Optimizer): + """Optimizer that implements the Conditional Gradient optimization. + Helps handle constraints well. + Currently only supports frobenius norm constraint. + See https://arxiv.org/pdf/1803.06453.pdf + ``` + variable -= (1-learning_rate) + * (variable + lamda * gradient / frobenius_norm(gradient)) + ``` + """ + + def __init__(self, learning_rate, lamda, + use_locking=False, name="ConditionalGradient"): + """Construct a conditional gradient optimizer. + Args: + learning_rate: A `Tensor` or a floating point value. The learning rate. + lamda: A `Tensor` or a floating point value. The constraint. + use_locking: If `True` use locks for update operations. + name: Optional name prefix for the operations created when applying + gradients. 
Defaults to "ConditionalGradient" + """ + super(ConditionalGradientOptimizer, self).__init__(use_locking, name) + self._learning_rate = learning_rate + self._lamda = lamda + + def _create_slots(self, var_list): + for v in var_list: + self._zeros_slot(v, "conditional_gradient", self._name) + + def _prepare(self): + learning_rate = self._learning_rate + if callable(learning_rate): + learning_rate = learning_rate() + self._learning_rate_tensor = ops.convert_to_tensor(learning_rate, + name="learning_rate") + lamda = self._lamda + if callable(lamda): + lamda = lamda() + self._lamda_tensor = ops.convert_to_tensor(lamda, name="lamda") + + def _apply_dense(self, grad, var): + def frobenius_norm(m): + return math_ops.reduce_sum(m ** 2) ** 0.5 + norm = ops.convert_to_tensor(frobenius_norm(grad), name="norm") + norm = math_ops.cast(norm, var.dtype.base_dtype) + lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) + lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) + var_update = state_ops.assign(var, math_ops.multiply(var, lr) \ + - (1-lr)* lamda * grad / norm, use_locking=self._use_locking) + return control_flow_ops.group(var_update) + + def _resource_apply_dense(self, grad, var): + def frobenius_norm(m): + return math_ops.reduce_sum(m ** 2) ** 0.5 + norm = ops.convert_to_tensor(frobenius_norm(grad), name="norm") + norm = math_ops.cast(norm, var.dtype.base_dtype) + lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) + lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) + var_update_tensor = math_ops.multiply(var, lr) - (1-lr)* lamda * grad / norm + var_update_op = resource_variable_ops.assign_variable_op(var.handle, + var_update_tensor) + return control_flow_ops.group(var_update_op) + + def _apply_sparse(self, grad, var): + def frobenius_norm(m): + return math_ops.reduce_sum(m ** 2) ** 0.5 + norm = ops.convert_to_tensor(frobenius_norm(grad.value), name="norm") + norm = math_ops.cast(norm, var.dtype.base_dtype) + lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) + lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) + var_slice = array_ops.gather(var, grad.indices) + var_update_value = math_ops.multiply(var_slice, lr) \ + - (1-lr)* lamda * grad.value / norm + var_update = state_ops.scatter_update(var, grad.indices, \ + var_update_value, use_locking=self._use_locking) + return control_flow_ops.group(var_update) + + def _resource_apply_sparse(self, grad, var, indices): + def frobenius_norm(m): + return math_ops.reduce_sum(m ** 2) ** 0.5 + norm = ops.convert_to_tensor(frobenius_norm(grad), name="norm") + norm = math_ops.cast(norm, var.dtype.base_dtype) + lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) + lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) + var_slice = array_ops.gather(var, indices) + var_update_value = math_ops.multiply(var_slice, lr) \ + - (1-lr) * lamda * grad / norm + var_update_op = resource_variable_ops.resource_scatter_update \ + (var.handle, indices, var_update_value) + return control_flow_ops.group(var_update_op) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py new file mode 100644 index 0000000000..04cda21c75 --- /dev/null +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -0,0 +1,588 @@ +# Copyright 2019 Vishnu sai rao suresh Lokhande & Pengyu Kan. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for Conditional Gradient.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +from six.moves import xrange # pylint: disable=redefined-builtin + +from tensorflow.python.eager import context +from tensorflow.python.framework import constant_op +from tensorflow.python.framework import dtypes +from tensorflow.python.framework import ops +from tensorflow.python.framework import test_util +from tensorflow.python.ops import array_ops +from tensorflow.python.ops import embedding_ops +from tensorflow.python.ops import math_ops +from tensorflow.python.ops import resource_variable_ops +from tensorflow.python.ops import variables +from tensorflow.python.platform import test +from tensorflow_addons.optimizers import conditional_gradient as cg_lib + +class ConditionalGradientTest(test.TestCase): + + + def _update_conditional_gradient_numpy(self, var, norm, g, lr, lamda): + var = var * lr - (1 - lr) * lamda * g /norm + return var + + def doTestBasic(self, use_resource=False, use_callable_params=False): + for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]): + if use_resource: + var0 = resource_variable_ops.ResourceVariable( + [1.0, 2.0], dtype=dtype, name="var0_%d" % i) + var1 = resource_variable_ops.ResourceVariable( + [3.0, 4.0], dtype=dtype, name="var1_%d" % i) + else: + var0 = variables.Variable([1.0, 2.0], dtype=dtype) + var1 = variables.Variable([3.0, 4.0], dtype=dtype) + grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) + grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) + norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 + norm1 = math_ops.reduce_sum(grads1 ** 2) ** 0.5 + learning_rate = lambda: 0.5 + lamda = lambda: 0.01 + if not use_callable_params: + learning_rate = learning_rate() + lamda = lamda() + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, lamda=lamda) + cg_update = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + + if not context.executing_eagerly(): + self.evaluate(variables.global_variables_initializer()) + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) + + # Check we have slots + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + if not context.executing_eagerly(): + self.assertFalse(slot0 in variables.trainable_variables()) + self.assertFalse(slot1 in variables.trainable_variables()) + + if not context.executing_eagerly(): + self.evaluate(cg_update) + + # Check that the parameters have been updated. 
+ norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([ + 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + + # Step 2: the conditional_gradient contain the previous update. + if context.executing_eagerly(): + cg_opt.apply_gradients(zip([grads0, grads1], [var0, var1])) + else: + self.evaluate(cg_update) + self.assertAllCloseAccordingToType( + np.array([ + (1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0, + (2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + (3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1, + (4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + + + def testBasic(self): + with self.cached_session(): + self.doTestBasic(use_resource=False) + + + @test_util.run_in_graph_and_eager_modes(reset_test=True) + def testResourceBasic(self): + self.doTestBasic(use_resource=True) + + def testBasicCallableParams(self): + with context.eager_mode(): + self.doTestBasic(use_resource=True, use_callable_params=True) + + def testVariablesAcrossGraphs(self): + optimizer = cg_lib.ConditionalGradientOptimizer(0.01, 0.5) + with ops.Graph().as_default(): + var0 = resource_variable_ops.ResourceVariable( + [1.0, 2.0], dtype=dtypes.float32, name="var0") + var1 = resource_variable_ops.ResourceVariable( + [3.0, 4.0], dtype=dtypes.float32, name="var1") + loss = math_ops.reduce_sum(var0 + var1) + optimizer.minimize(loss) + optimizer_variables = optimizer.variables() + self.assertStartsWith(optimizer_variables[0].name, "var0") + self.assertStartsWith(optimizer_variables[1].name, "var1") + self.assertEqual(2, len(optimizer_variables)) + + with ops.Graph().as_default(): + var2 = resource_variable_ops.ResourceVariable( + [1.0, 2.0], dtype=dtypes.float32, name="var2") + var3 = resource_variable_ops.ResourceVariable( + [3.0, 4.0], dtype=dtypes.float32, name="var3") + loss = math_ops.reduce_sum(var2 + var3) + optimizer.minimize(loss) + optimizer_variables = optimizer.variables() + self.assertStartsWith(optimizer_variables[0].name, "var2") + self.assertStartsWith(optimizer_variables[1].name, "var3") + self.assertEqual(2, len(optimizer_variables)) + + + @test_util.run_in_graph_and_eager_modes(reset_test=True) + def testMinimizeSparseResourceVariable(self): + for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: + # This test invokes the ResourceSparseApplyConditionalGradient operation, which + # did not have a registered GPU kernel as of April 2018. With graph + # execution, the placement algorithm notices this and automatically + # places the variable in CPU (host) memory. With eager execution, + # the variable would be placed in GPU memory if available, which + # would then conflict with the future invocation of the + # ResourceSparseApplyConditionalGradient operation. + # To work around this discrepancy, for now we force the variable + # to be placed on CPU. 
+ with ops.device("/cpu:0"): + var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype) + + # pylint: disable=cell-var-from-loop + def loss(): + x = constant_op.constant([[4.0], [5.0]], dtype=dtype) + pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x) + return pred * pred + + #the gradient based on the current loss function + grads0_0 = 32 * 1.0 + 40 * 2.0 + grads0_1 = 40 * 1.0 + 50 * 2.0 + grads0 = constant_op.constant([[grads0_0, grads0_1]], dtype=dtype) + norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 + + # pylint: enable=cell-var-from-loop + learning_rate = 0.1 + lamda = 0.1 + opt = cg_lib.ConditionalGradientOptimizer(learning_rate=learning_rate, \ + lamda=lamda) + cg_op = opt.minimize(loss) + self.evaluate(variables.global_variables_initializer()) + + # Run 1 step of cg_op + self.evaluate(cg_op) + + # Validate updated params + norm0 = self.evaluate(norm0) + self.assertAllCloseAccordingToType([ + [1.0 * learning_rate - (1-learning_rate)*lamda*grads0_0/norm0, + 2.0 * learning_rate - (1-learning_rate)*lamda*grads0_1/norm0] + ], self.evaluate(var0)) + + + @test_util.run_in_graph_and_eager_modes(reset_test=True) + def testMinimizeWith2DIndiciesForEmbeddingLookup(self): + # This test invokes the ResourceSparseApplyConditionalGradient operation, which + # did not have a registered GPU kernel as of April 2018. With graph + # execution, the placement algorithm notices this and automatically + # places the variable in CPU (host) memory. With eager execution, + # the variable would be placed in GPU memory if available, which + # would then conflict with the future invocation of the + # ResourceSparseApplyConditionalGradient operation. + # To work around this discrepancy, for now we force the variable + # to be placed on CPU. + with ops.device("/cpu:0"): + var0 = resource_variable_ops.ResourceVariable(array_ops.ones([2, 2])) + + def loss(): + return math_ops.reduce_sum(embedding_ops.embedding_lookup(var0, [[1]])) + + # the gradient for this loss function: + grads0 = constant_op.constant([[0, 0], [1, 1]], dtype=dtypes.float32) + norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 + + learning_rate = 0.1 + lamda = 0.1 + opt = cg_lib.ConditionalGradientOptimizer(learning_rate=learning_rate, \ + lamda=lamda) + cg_op = opt.minimize(loss) + self.evaluate(variables.global_variables_initializer()) + + # Run 1 step of cg_op + self.evaluate(cg_op) + norm0 = self.evaluate(norm0) + ''' + This is the test case we need to pass, if we only want to update the + sparse dimension's entry of the var. 
+ ''' + self.assertAllCloseAccordingToType([ + [1, + 1], + [learning_rate * 1 - (1-learning_rate)*lamda*1/norm0, + learning_rate * 1 - (1-learning_rate)*lamda*1/norm0] + ], self.evaluate(var0)) + + @test_util.run_deprecated_v1 + def testTensorLearningRateAndConditionalGradient(self): + for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: + with self.cached_session(): + var0 = variables.Variable([1.0, 2.0], dtype=dtype) + var1 = variables.Variable([3.0, 4.0], dtype=dtype) + grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) + grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) + norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 + norm1 = math_ops.reduce_sum(grads1 ** 2) ** 0.5 + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=constant_op.constant(0.5), + lamda=constant_op.constant(0.01)) + cg_update = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + variables.global_variables_initializer().run() + # Check we have slots + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + if not context.executing_eagerly(): + self.assertFalse(slot0 in variables.trainable_variables()) + self.assertFalse(slot1 in variables.trainable_variables()) + + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) + + cg_update.run() + # Check that the parameters have been updated. + norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([ + 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + # Step 2: the conditional_gradient contain the previous update. + cg_update.run() + # Check that the parameters have been updated. + self.assertAllCloseAccordingToType( + np.array([ + (1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0, + (2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + (3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1, + (4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + + def _dbParamsCG01(self): + """Return dist-belief conditional_gradient values. + + Return values been generated from the dist-belief conditional_gradient + unittest, running with a learning rate of 0.1 and a lamda of 0.1. + + These values record how a parameter vector of size 10, initialized with 0.0, + gets updated with 10 consecutive conditional_gradient steps. + It uses random gradients. + + Returns: + db_grad: The gradients to apply + db_out: The parameters after the conditional_gradient update. 
+ """ + db_grad = [[]] * 10 + db_out = [[]] * 10 + # pylint: disable=line-too-long + db_grad[0] = [ + 0.00096264342, 0.17914793, 0.93945462, 0.41396621, 0.53037018, + 0.93197989, 0.78648776, 0.50036013, 0.55345792, 0.96722615 + ] + db_out[0] = [ + -4.1555551e-05, -7.7334875e-03, -4.0554531e-02, -1.7870162e-02, + -2.2895107e-02, -4.0231861e-02, -3.3951234e-02, -2.1599628e-02, + -2.3891762e-02, -4.1753378e-02 + ] + db_grad[1] = [ + 0.17075552, 0.88821375, 0.20873757, 0.25236958, 0.57578111, 0.15312378, + 0.5513742, 0.94687688, 0.16012503, 0.22159521 + ] + db_out[1] = [ + -0.00961733, -0.0507779, -0.01580694, -0.01599489, -0.03470477, -0.01264373, + -0.03443632, -0.05546713, -0.01140388, -0.01665068 + ] + db_grad[2] = [ + 0.35077485, 0.47304362, 0.44412705, 0.44368884, 0.078527533, 0.81223965, + 0.31168157, 0.43203235, 0.16792089, 0.24644311 + ] + db_out[2] = [ + -0.02462724, -0.03699233, -0.03154434, -0.03153357, -0.00876844, -0.05606323, + -0.02447166, -0.03469437, -0.0124694, -0.01829169 + ] + db_grad[3] = [ + 0.9694621, 0.75035888, 0.28171822, 0.83813518, 0.53807181, 0.3728098, + 0.81454384, 0.03848977, 0.89759839, 0.93665648 + ] + db_out[3] = [ + -0.04124615, -0.03371741, -0.0144246, -0.03668303, -0.02240246, -0.02052062, + -0.03503307, -0.00500922, -0.03715545, -0.0393002 + ] + db_grad[4] = [ + 0.38578293, 0.8536852, 0.88722926, 0.66276771, 0.13678469, 0.94036359, + 0.69107032, 0.81897682, 0.5433259, 0.67860287 + ] + db_out[4] = [ + -0.01979208, -0.0380417, -0.03747472, -0.0305847, -0.00779536, -0.04024222, + -0.03156913, -0.0337613, -0.02578116, -0.03148952 + ] + db_grad[5] = [ + 0.27885768, 0.76100707, 0.24625534, 0.81354135, 0.18959245, 0.48038563, + 0.84163809, 0.41172323, 0.83259648, 0.44941229 + ] + db_out[5] = [ + -0.01555188, -0.04084422, -0.01573331, -0.04265549, -0.01000746, -0.02740575, + -0.04412147, -0.02341569, -0.0431026, -0.02502293 + ] + db_grad[6] = [ + 0.27233034, 0.056316052, 0.5039115, 0.24105175, 0.35697976, 0.75913221, + 0.73577434, 0.16014607, 0.57500273, 0.071136251 + ] + db_out[6] = [ + -0.01890448, -0.00767214, -0.03367592, -0.01962219, -0.02374279, -0.05110247, + -0.05128598, -0.01254396, -0.04094185, -0.00703416 + ] + db_grad[7] = [ + 0.58697265, 0.2494842, 0.08106143, 0.39954534, 0.15892942, 0.12683646, + 0.74053431, 0.16033, 0.66625422, 0.73515922 + ] + db_out[7] = [ + -0.03772914, -0.01599993, -0.00831695, -0.02635719, -0.01207801, -0.01285448, + -0.05034328, -0.01104364, -0.04477356, -0.04558991 + ] + db_grad[8] = [ + 0.8215279, 0.41994119, 0.95172721, 0.68000203, 0.79439718, 0.43384039, + 0.55561525, 0.22567581, 0.93331909, 0.29438227 + ] + db_out[8] = [ + -0.03919835, -0.01970845, -0.04187151, -0.03195836, -0.03546333, -0.01999326, + -0.02899324, -0.01083582, -0.04472339, -0.01725317 + ] + db_grad[9] = [ + 0.68297005, 0.67758518, 0.1748755, 0.13266537, 0.70697063, 0.055731893, + 0.68593478, 0.50580865, 0.12602448, 0.093537711 + ] + db_out[9] = [ + -0.04510314, -0.04282944, -0.0147322, -0.0111956, -0.04617687, -0.00535998, + -0.0442614, -0.03158399, -0.01207165, -0.00736567 + ] + # pylint: enable=line-too-long + return db_grad, db_out + + @test_util.run_deprecated_v1 + def testLikeDistBeliefCG01(self): + with self.cached_session(): + db_grad, db_out = self._dbParamsCG01() + num_samples = len(db_grad) + var0 = variables.Variable([0.0] * num_samples) + grads0 = constant_op.constant([0.0] * num_samples) + cg_opt = cg_lib.ConditionalGradientOptimizer(learning_rate=0.1, lamda=0.1) + cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) + 
variables.global_variables_initializer().run() + for i in xrange(num_samples): + cg_update.run(feed_dict={grads0: db_grad[i]}) + self.assertAllClose(np.array(db_out[i]), self.evaluate(var0)) + + + @test_util.run_deprecated_v1 + def testSparse(self): + for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: + with self.cached_session(): + var0 = variables.Variable(array_ops.zeros([4, 2], dtype=dtype)) + var1 = variables.Variable(constant_op.constant(1.0, dtype, [4, 2])) + grads0 = ops.IndexedSlices( + constant_op.constant( + [[.1, .1]], dtype=dtype), + constant_op.constant([1]), + constant_op.constant([4, 2])) + grads1 = ops.IndexedSlices( + constant_op.constant( + [[.01, .01], [.01, .01]], dtype=dtype), + constant_op.constant([2, 3]), + constant_op.constant([4, 2])) + norm0 = math_ops.reduce_sum(math_ops.multiply(grads0, grads0)) ** 0.5 + norm1 = math_ops.reduce_sum(math_ops.multiply(grads1, grads1)) ** 0.5 + learning_rate = 0.1 + lamda = 0.1 + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, lamda=lamda) + cg_update = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + variables.global_variables_initializer().run() + # Check we have slots + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + if not context.executing_eagerly(): + self.assertFalse(slot0 in variables.trainable_variables()) + self.assertFalse(slot1 in variables.trainable_variables()) + + # Fetch params to validate initial values + self.assertAllClose([0, 0], self.evaluate(var0)[0]) + self.assertAllClose([0, 0], self.evaluate(var0)[1]) + self.assertAllClose([1, 1], self.evaluate(var1)[2]) + + # Step 1: + cg_update.run() + # Check that the parameters have been updated. + norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([0 - (1 - learning_rate) * lamda * 0 / norm0, + 0 - (1 - learning_rate) * lamda * 0 / norm0]), + self.evaluate(var0)[0]) + self.assertAllCloseAccordingToType( + np.array([0 - (1 - learning_rate) * lamda * 0.1 / norm0, + 0 - (1 - learning_rate) * lamda * 0.1 / norm0]), + self.evaluate(var0)[1]) + self.assertAllCloseAccordingToType( + np.array([1.0 * learning_rate- (1 - learning_rate) \ + * lamda * 0.01 / norm1, + 1.0 * learning_rate- (1 - learning_rate) \ + * lamda * 0.01 / norm1]), + self.evaluate(var1)[2]) + # Step 2: the conditional_gradient contain the previous update. + cg_update.run() + # Check that the parameters have been updated. 
+ self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) + self.assertAllCloseAccordingToType( + np.array([ + (0 - (1 - learning_rate) * lamda * 0.1 / norm0) \ + * learning_rate \ + - (1 - learning_rate) * lamda * 0.1 / norm0, + (0 - (1 - learning_rate) * lamda * 0.1 / norm0) \ + * learning_rate \ + - (1 - learning_rate) * lamda * 0.1 / norm0]), + self.evaluate(var0)[1]) + self.assertAllCloseAccordingToType( + np.array([ + (1.0 * learning_rate - \ + (1 - learning_rate) * lamda * 0.01 / norm1) \ + * learning_rate - (1 - learning_rate) \ + * lamda * 0.01 / norm1, + (1.0 * learning_rate- \ + (1 - learning_rate) * lamda * 0.01 / norm1) \ + * learning_rate - (1 - learning_rate) \ + * lamda * 0.01 / norm1]), + self.evaluate(var1)[2]) + + + @test_util.run_deprecated_v1 + def testSharing(self): + for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: + with self.cached_session(): + var0 = variables.Variable([1.0, 2.0], dtype=dtype) + var1 = variables.Variable([3.0, 4.0], dtype=dtype) + grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) + grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) + norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 + norm1 = math_ops.reduce_sum(grads1 ** 2) ** 0.5 + learning_rate = 0.1 + lamda = 0.1 + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, lamda=lamda) + cg_update1 = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + cg_update2 = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + variables.global_variables_initializer().run() + # Check we have slots + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + if not context.executing_eagerly(): + self.assertFalse(slot0 in variables.trainable_variables()) + self.assertFalse(slot1 in variables.trainable_variables()) + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) + + cg_update1.run() + # Check that the parameters have been updated. + norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array( + [1.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.1 / norm0, + 2.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.1 / norm0]), + self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array( + [3.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.01 / norm1, + 4.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.01 / norm1]), + self.evaluate(var1)) + + # Step 2: the second conditional_gradient contain the previous update. + cg_update2.run() + + # Check that the parameters have been updated. 
+ self.assertAllCloseAccordingToType( + np.array([ + (1.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.1 / norm0) * learning_rate \ + - (1 - learning_rate) * lamda * 0.1 / norm0, + (2.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.1 / norm0) * learning_rate \ + - (1 - learning_rate) * lamda * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + (3.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.01 / norm1) * learning_rate \ + - (1 - learning_rate) * lamda * 0.01 / norm1, + (4.0 * learning_rate - (1-learning_rate) \ + * lamda * 0.01 / norm1) * learning_rate \ + - (1 - learning_rate) * lamda * 0.01 / norm1 + ]), self.evaluate(var1)) + +if __name__ == "__main__": + test.main() From eb64aba7c21f9ea0d7e08230aae266c336201bfc Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sat, 31 Aug 2019 15:34:17 -0500 Subject: [PATCH 02/20] Add files via upload --- tensorflow_addons/optimizers/BUILD | 14 + tensorflow_addons/optimizers/__init__.py | 2 + .../optimizers/conditional_gradient.py | 179 ++- .../optimizers/conditional_gradient_test.py | 1195 +++++++++-------- 4 files changed, 734 insertions(+), 656 deletions(-) diff --git a/tensorflow_addons/optimizers/BUILD b/tensorflow_addons/optimizers/BUILD index 1e49a0f1bf..bb6a0d0641 100644 --- a/tensorflow_addons/optimizers/BUILD +++ b/tensorflow_addons/optimizers/BUILD @@ -9,6 +9,7 @@ py_library( "lazy_adam.py", "moving_average.py", "weight_decay_optimizers.py", + "conditional_gradient.py", ], srcs_version = "PY2AND3", deps = [ @@ -54,3 +55,16 @@ py_test( ":optimizers", ], ) + +py_test( + name = "conditional_gradient_test", + size = "small", + srcs = [ + "conditional_gradient_test.py", + ], + main = "conditional_gradient_test.py", + srcs_version = "PY2AND3", + deps = [ + ":optimizers", + ], +) diff --git a/tensorflow_addons/optimizers/__init__.py b/tensorflow_addons/optimizers/__init__.py index ccb5eda3cc..ec1341f212 100644 --- a/tensorflow_addons/optimizers/__init__.py +++ b/tensorflow_addons/optimizers/__init__.py @@ -24,3 +24,5 @@ from tensorflow_addons.optimizers.weight_decay_optimizers import SGDW from tensorflow_addons.optimizers.weight_decay_optimizers import ( extend_with_decoupled_weight_decay) +from conditional_gradient import ConditionalGradientOptimizer +#from tensorflow_addons.optimizers.conditional_gradient import ConditionalGradientOptimizer diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 8f8721ff7c..7808e37f64 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -18,108 +18,95 @@ from __future__ import division from __future__ import print_function -from tensorflow.python.framework import ops -from tensorflow.python.ops import math_ops -from tensorflow.python.training import optimizer -#from tensorflow.python.training import training_ops -from tensorflow.python.util.tf_export import tf_export -from tensorflow.python.ops import control_flow_ops -from tensorflow.python.ops import state_ops +import tensorflow as tf +from tensorflow_addons.utils import keras_utils -from tensorflow.python.ops import resource_variable_ops -#from tensorflow.python.ops import variable_scope -from tensorflow.python.ops import array_ops - - - -@tf_export(v1=["train.ConditionalGradientOptimizer"]) -class ConditionalGradientOptimizer(optimizer.Optimizer): - """Optimizer that implements the Conditional Gradient optimization. 
- Helps handle constraints well. - Currently only supports frobenius norm constraint. - See https://arxiv.org/pdf/1803.06453.pdf - ``` - variable -= (1-learning_rate) - * (variable + lamda * gradient / frobenius_norm(gradient)) - ``` - """ - - def __init__(self, learning_rate, lamda, - use_locking=False, name="ConditionalGradient"): - """Construct a conditional gradient optimizer. - Args: - learning_rate: A `Tensor` or a floating point value. The learning rate. - lamda: A `Tensor` or a floating point value. The constraint. - use_locking: If `True` use locks for update operations. - name: Optional name prefix for the operations created when applying - gradients. Defaults to "ConditionalGradient" +@keras_utils.register_keras_custom_object + #class ConditionalGradientOptimizer(tf.keras.optimizer_v2.OptimizerV2): +class ConditionalGradientOptimizer(tf.keras.optimizers.Optimizer): + """Optimizer that implements the Conditional Gradient optimization. + Helps handle constraints well. + Currently only supports frobenius norm constraint. + See https://arxiv.org/pdf/1803.06453.pdf + ``` + variable -= (1-learning_rate) + * (variable + lamda * gradient / frobenius_norm(gradient)) + ``` """ - super(ConditionalGradientOptimizer, self).__init__(use_locking, name) - self._learning_rate = learning_rate - self._lamda = lamda - def _create_slots(self, var_list): - for v in var_list: - self._zeros_slot(v, "conditional_gradient", self._name) + def __init__(self, learning_rate, lamda, + use_locking=False, name="ConditionalGradient"): + """Construct a conditional gradient optimizer. + Args: + learning_rate: A `Tensor` or a floating point value. + The learning rate. + lamda: A `Tensor` or a floating point value. The constraint. + use_locking: If `True` use locks for update operations. + name: Optional name prefix for the operations created when + applying gradients. 
Defaults to "ConditionalGradient" + """ + super(ConditionalGradientOptimizer, self).__init__(name=name) + self._set_hyper("learning_rate", learning_rate) + self._set_hyper("lamda", lamda) + + def get_config(self): + config = { + 'learning_rate': self._learning_rate, + 'lamda': self._lamda, + 'use_locking': self._use_locking + } + base_config = super(ConditionalGradient, self).get_config() + return dict(list(base_config.items()) + list(config.items())) - def _prepare(self): - learning_rate = self._learning_rate - if callable(learning_rate): - learning_rate = learning_rate() - self._learning_rate_tensor = ops.convert_to_tensor(learning_rate, - name="learning_rate") - lamda = self._lamda - if callable(lamda): - lamda = lamda() - self._lamda_tensor = ops.convert_to_tensor(lamda, name="lamda") + def _create_slots(self, var_list): + for v in var_list: + self.add_slot(v, "conditional_gradient") - def _apply_dense(self, grad, var): - def frobenius_norm(m): - return math_ops.reduce_sum(m ** 2) ** 0.5 - norm = ops.convert_to_tensor(frobenius_norm(grad), name="norm") - norm = math_ops.cast(norm, var.dtype.base_dtype) - lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) - lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) - var_update = state_ops.assign(var, math_ops.multiply(var, lr) \ - - (1-lr)* lamda * grad / norm, use_locking=self._use_locking) - return control_flow_ops.group(var_update) + def _prepare(self, var_list): + learning_rate = self.learning_rate + if callable(learning_rate): + learning_rate = learning_rate() + self._learning_rate_tensor = tf.convert_to_tensor( + learning_rate, name="learning_rate") + lamda = self.lamda + if callable(lamda): + lamda = lamda() + self._lamda_tensor = tf.convert_to_tensor(lamda, name="lamda") - def _resource_apply_dense(self, grad, var): - def frobenius_norm(m): - return math_ops.reduce_sum(m ** 2) ** 0.5 - norm = ops.convert_to_tensor(frobenius_norm(grad), name="norm") - norm = math_ops.cast(norm, var.dtype.base_dtype) - lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) - lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) - var_update_tensor = math_ops.multiply(var, lr) - (1-lr)* lamda * grad / norm - var_update_op = resource_variable_ops.assign_variable_op(var.handle, - var_update_tensor) - return control_flow_ops.group(var_update_op) + def _resource_apply_dense(self, grad, var): + def frobenius_norm(m): + return tf.math.reduce_sum(m ** 2) ** 0.5 + norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") + norm = tf.dtypes.cast(norm, var.dtype.base_dtype) + lr = tf.dtypes.cast(self._learning_rate_tensor, + var.dtype.base_dtype) + lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) + var_update_tensor = tf.math.multiply(var, lr) \ + - (1-lr)* lamda * grad / norm + var_update_kwargs = { + 'resource': var.handle, + 'value': var_update_tensor, + } - def _apply_sparse(self, grad, var): - def frobenius_norm(m): - return math_ops.reduce_sum(m ** 2) ** 0.5 - norm = ops.convert_to_tensor(frobenius_norm(grad.value), name="norm") - norm = math_ops.cast(norm, var.dtype.base_dtype) - lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) - lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) - var_slice = array_ops.gather(var, grad.indices) - var_update_value = math_ops.multiply(var_slice, lr) \ - - (1-lr)* lamda * grad.value / norm - var_update = state_ops.scatter_update(var, grad.indices, \ - var_update_value, use_locking=self._use_locking) - return 
control_flow_ops.group(var_update) + var_update_op = tf.raw_ops.AssignVariableOp(**var_update_kwargs) + return tf.group(var_update_op) - def _resource_apply_sparse(self, grad, var, indices): - def frobenius_norm(m): - return math_ops.reduce_sum(m ** 2) ** 0.5 - norm = ops.convert_to_tensor(frobenius_norm(grad), name="norm") - norm = math_ops.cast(norm, var.dtype.base_dtype) - lr = math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype) - lamda = math_ops.cast(self._lamda_tensor, var.dtype.base_dtype) - var_slice = array_ops.gather(var, indices) - var_update_value = math_ops.multiply(var_slice, lr) \ + def _resource_apply_sparse(self, grad, var, indices): + def frobenius_norm(m): + return tf.reduce_sum(m ** 2) ** 0.5 + norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") + norm = tf.dtypes.cast(norm, var.dtype.base_dtype) + lr = tf.dtypes.cast(self._learning_rate_tensor, + var.dtype.base_dtype) + lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) + var_slice = tf.gather(var, indices) + var_update_value = tf.math.multiply(var_slice, lr) \ - (1-lr) * lamda * grad / norm - var_update_op = resource_variable_ops.resource_scatter_update \ - (var.handle, indices, var_update_value) - return control_flow_ops.group(var_update_op) + var_update_kwargs = { + 'resource': var.handle, + 'indices': indices, + 'updates': var_update_value + } + var_update_op = tf.raw_ops.ResourceScatterUpdate \ + (**var_update_kwargs) + return tf.group(var_update_op) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 04cda21c75..5cfc119964 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -18,571 +18,646 @@ from __future__ import division from __future__ import print_function +import tensorflow as tf +from tensorflow_addons.utils import test_utils import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin +import conditional_gradient as cg_lib + +@test_utils.run_all_in_graph_and_eager_modes +class ConditionalGradientTest(tf.test.TestCase): + + + def _update_conditional_gradient_numpy(self, var, norm, g, lr, lamda): + var = var * lr - (1 - lr) * lamda * g /norm + return var + + def doTestBasic(self, use_resource=False, use_callable_params=False): + for i, dtype in enumerate([tf.half, tf.float32, tf.float64]): + if use_resource: + var0 = tf.Variable( + [1.0, 2.0], dtype=dtype, name="var0_%d" % i) + var1 = tf.Variable( + [3.0, 4.0], dtype=dtype, name="var1_%d" % i) + else: + var0 = tf.Variable([1.0, 2.0], dtype=dtype) + var1 = tf.Variable([3.0, 4.0], dtype=dtype) + grads0 = tf.constant([0.1, 0.1], dtype=dtype) + grads1 = tf.constant([0.01, 0.01], dtype=dtype) + norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 + norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 + learning_rate = lambda: 0.5 + lamda = lambda: 0.01 + if not use_callable_params: + learning_rate = learning_rate() + lamda = lamda() + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, lamda=lamda) + cg_update = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + + if not tf.executing_eagerly(): + self.evaluate(tf.compat.v1.global_variables_initializer()) + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) + + # Check we have slots + self.assertEqual(["conditional_gradient"], + cg_opt.get_slot_names()) + slot0 
= cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + ''' + if not tf.executing_eagerly(): + self.assertFalse(slot0 in tf.trainable_variables()) + self.assertFalse(slot1 in tf.trainable_variables()) + ''' + if not tf.executing_eagerly(): + self.evaluate(cg_update) + + # Check that the parameters have been updated. + norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([ + 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + + # Step 2: the conditional_gradient contain the previous update. + if tf.executing_eagerly(): + cg_opt.apply_gradients(zip([grads0, grads1], [var0, var1])) + else: + self.evaluate(cg_update) + self.assertAllCloseAccordingToType( + np.array([ + (1.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0, + (2.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + (3.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1, + (4.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + + + def testBasic(self): + with self.cached_session(): + self.doTestBasic(use_resource=False) + + + @test_utils.run_in_graph_and_eager_modes(reset_test=True) + def testResourceBasic(self): + self.doTestBasic(use_resource=True) + + def testBasicCallableParams(self): + #with tf.enable_eager_execution(): + self.doTestBasic(use_resource=True, use_callable_params=True) + + def testVariablesAcrossGraphs(self): + optimizer = cg_lib.ConditionalGradientOptimizer(0.01, 0.5) + with tf.Graph().as_default(): + var0 = tf.Variable( + [1.0, 2.0], dtype=tf.float32, name="var0") + var1 = tf.Variable( + [3.0, 4.0], dtype=tf.float32, name="var1") + loss = lambda: tf.math.reduce_sum(var0 + var1) + optimizer.minimize(loss, var_list=[var0, var1]) + optimizer_variables = optimizer.variables() + # There should be three items. The first item is iteration, + #and one item for each variable. + self.assertStartsWith(optimizer_variables[1].name, + "ConditionalGradient/var0") + self.assertStartsWith(optimizer_variables[2].name, + "ConditionalGradient/var1") + self.assertEqual(3, len(optimizer_variables)) + ''' + with tf.Graph().as_default(): + var2 = tf.Variable( + [1.0, 2.0], dtype=tf.float32, name="var2") + var3 = tf.Variable( + [3.0, 4.0], dtype=tf.float32, name="var3") + loss = lambda: tf.math.reduce_sum(var2 + var3) + optimizer.minimize(loss, var_list=[var2, var3]) + optimizer_variables = optimizer.variables() + self.assertStartsWith(optimizer_variables[1].name, + "ConditionalGraident/var2") + self.assertStartsWith(optimizer_variables[2].name, + "ConditionalGraident/var3") + self.assertEqual(3, len(optimizer_variables)) + ''' + + @test_utils.run_in_graph_and_eager_modes(reset_test=True) + def testMinimizeSparseResourceVariable(self): + for dtype in [tf.half, tf.float32, tf.float64]: + # This test invokes the ResourceSparseApplyConditionalGradient + # operation, which did not have a registered GPU kernel as of + # April 2018. 
+ # With graph execution, the placement algorithm notices this + # and automatically places the variable in CPU (host) memory. + # With eager execution, the variable would be placed in GPU + # memory if available, which would then conflict with the + # future invocation of the + # ResourceSparseApplyConditionalGradient operation. + # To work around this discrepancy, for now we force the variable + # to be placed on CPU. + with tf.device("/cpu:0"): + var0 = tf.Variable([[1.0, 2.0]], dtype=dtype) + + # pylint: disable=cell-var-from-loop + def loss(): + x = tf.constant([[4.0], [5.0]], dtype=dtype) + pred = tf.matmul(tf.nn.embedding_lookup([var0], [0]), x) + return pred * pred + + #the gradient based on the current loss function + grads0_0 = 32 * 1.0 + 40 * 2.0 + grads0_1 = 40 * 1.0 + 50 * 2.0 + grads0 = tf.constant([[grads0_0, grads0_1]], dtype=dtype) + norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 + + # pylint: enable=cell-var-from-loop + learning_rate = 0.1 + lamda = 0.1 + opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, + lamda=lamda) + cg_op = opt.minimize(loss, var_list=[var0]) + self.evaluate(tf.compat.v1.global_variables_initializer()) + + # Run 1 step of cg_op + self.evaluate(cg_op) + + # Validate updated params + norm0 = self.evaluate(norm0) + self.assertAllCloseAccordingToType([ + [1.0 * learning_rate - \ + (1-learning_rate)*lamda*grads0_0/norm0, + 2.0 * learning_rate - \ + (1-learning_rate)*lamda*grads0_1/norm0] + ], self.evaluate(var0)) + + + @test_utils.run_in_graph_and_eager_modes(reset_test=True) + def testMinimizeWith2DIndiciesForEmbeddingLookup(self): + # This test invokes the ResourceSparseApplyConditionalGradient + # operation, which + # did not have a registered GPU kernel as of April 2018. With graph + # execution, the placement algorithm notices this and automatically + # places the variable in CPU (host) memory. With eager execution, + # the variable would be placed in GPU memory if available, which + # would then conflict with the future invocation of the + # ResourceSparseApplyConditionalGradient operation. + # To work around this discrepancy, for now we force the variable + # to be placed on CPU. 
+ with tf.device("/cpu:0"): + var0 = tf.Variable(tf.ones([2, 2])) + + def loss(): + return tf.math.reduce_sum(tf.nn.embedding_lookup(var0, [[1]])) + + # the gradient for this loss function: + grads0 = tf.constant([[0, 0], [1, 1]], dtype=tf.float32) + norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 -from tensorflow.python.eager import context -from tensorflow.python.framework import constant_op -from tensorflow.python.framework import dtypes -from tensorflow.python.framework import ops -from tensorflow.python.framework import test_util -from tensorflow.python.ops import array_ops -from tensorflow.python.ops import embedding_ops -from tensorflow.python.ops import math_ops -from tensorflow.python.ops import resource_variable_ops -from tensorflow.python.ops import variables -from tensorflow.python.platform import test -from tensorflow_addons.optimizers import conditional_gradient as cg_lib - -class ConditionalGradientTest(test.TestCase): - - - def _update_conditional_gradient_numpy(self, var, norm, g, lr, lamda): - var = var * lr - (1 - lr) * lamda * g /norm - return var - - def doTestBasic(self, use_resource=False, use_callable_params=False): - for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]): - if use_resource: - var0 = resource_variable_ops.ResourceVariable( - [1.0, 2.0], dtype=dtype, name="var0_%d" % i) - var1 = resource_variable_ops.ResourceVariable( - [3.0, 4.0], dtype=dtype, name="var1_%d" % i) - else: - var0 = variables.Variable([1.0, 2.0], dtype=dtype) - var1 = variables.Variable([3.0, 4.0], dtype=dtype) - grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) - grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) - norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 - norm1 = math_ops.reduce_sum(grads1 ** 2) ** 0.5 - learning_rate = lambda: 0.5 - lamda = lambda: 0.01 - if not use_callable_params: - learning_rate = learning_rate() - lamda = lamda() - cg_opt = cg_lib.ConditionalGradientOptimizer( - learning_rate=learning_rate, lamda=lamda) - cg_update = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) - - if not context.executing_eagerly(): - self.evaluate(variables.global_variables_initializer()) - # Fetch params to validate initial values - self.assertAllClose([1.0, 2.0], self.evaluate(var0)) - self.assertAllClose([3.0, 4.0], self.evaluate(var1)) - - # Check we have slots - self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) - slot0 = cg_opt.get_slot(var0, "conditional_gradient") - self.assertEquals(slot0.get_shape(), var0.get_shape()) - slot1 = cg_opt.get_slot(var1, "conditional_gradient") - self.assertEquals(slot1.get_shape(), var1.get_shape()) - if not context.executing_eagerly(): - self.assertFalse(slot0 in variables.trainable_variables()) - self.assertFalse(slot1 in variables.trainable_variables()) - - if not context.executing_eagerly(): - self.evaluate(cg_update) - - # Check that the parameters have been updated. - norm0 = self.evaluate(norm0) - norm1 = self.evaluate(norm1) - self.assertAllCloseAccordingToType( - np.array([ - 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, - 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array([ - 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, - 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) - - # Step 2: the conditional_gradient contain the previous update. 
- if context.executing_eagerly(): - cg_opt.apply_gradients(zip([grads0, grads1], [var0, var1])) - else: - self.evaluate(cg_update) - self.assertAllCloseAccordingToType( - np.array([ - (1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.1 / norm0, - (2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array([ - (3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.01 / norm1, - (4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) - - - def testBasic(self): - with self.cached_session(): - self.doTestBasic(use_resource=False) - - - @test_util.run_in_graph_and_eager_modes(reset_test=True) - def testResourceBasic(self): - self.doTestBasic(use_resource=True) - - def testBasicCallableParams(self): - with context.eager_mode(): - self.doTestBasic(use_resource=True, use_callable_params=True) - - def testVariablesAcrossGraphs(self): - optimizer = cg_lib.ConditionalGradientOptimizer(0.01, 0.5) - with ops.Graph().as_default(): - var0 = resource_variable_ops.ResourceVariable( - [1.0, 2.0], dtype=dtypes.float32, name="var0") - var1 = resource_variable_ops.ResourceVariable( - [3.0, 4.0], dtype=dtypes.float32, name="var1") - loss = math_ops.reduce_sum(var0 + var1) - optimizer.minimize(loss) - optimizer_variables = optimizer.variables() - self.assertStartsWith(optimizer_variables[0].name, "var0") - self.assertStartsWith(optimizer_variables[1].name, "var1") - self.assertEqual(2, len(optimizer_variables)) - - with ops.Graph().as_default(): - var2 = resource_variable_ops.ResourceVariable( - [1.0, 2.0], dtype=dtypes.float32, name="var2") - var3 = resource_variable_ops.ResourceVariable( - [3.0, 4.0], dtype=dtypes.float32, name="var3") - loss = math_ops.reduce_sum(var2 + var3) - optimizer.minimize(loss) - optimizer_variables = optimizer.variables() - self.assertStartsWith(optimizer_variables[0].name, "var2") - self.assertStartsWith(optimizer_variables[1].name, "var3") - self.assertEqual(2, len(optimizer_variables)) - - - @test_util.run_in_graph_and_eager_modes(reset_test=True) - def testMinimizeSparseResourceVariable(self): - for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: - # This test invokes the ResourceSparseApplyConditionalGradient operation, which - # did not have a registered GPU kernel as of April 2018. With graph - # execution, the placement algorithm notices this and automatically - # places the variable in CPU (host) memory. With eager execution, - # the variable would be placed in GPU memory if available, which - # would then conflict with the future invocation of the - # ResourceSparseApplyConditionalGradient operation. - # To work around this discrepancy, for now we force the variable - # to be placed on CPU. 
- with ops.device("/cpu:0"): - var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype) - - # pylint: disable=cell-var-from-loop - def loss(): - x = constant_op.constant([[4.0], [5.0]], dtype=dtype) - pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x) - return pred * pred - - #the gradient based on the current loss function - grads0_0 = 32 * 1.0 + 40 * 2.0 - grads0_1 = 40 * 1.0 + 50 * 2.0 - grads0 = constant_op.constant([[grads0_0, grads0_1]], dtype=dtype) - norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 - - # pylint: enable=cell-var-from-loop - learning_rate = 0.1 - lamda = 0.1 - opt = cg_lib.ConditionalGradientOptimizer(learning_rate=learning_rate, \ - lamda=lamda) - cg_op = opt.minimize(loss) - self.evaluate(variables.global_variables_initializer()) - - # Run 1 step of cg_op - self.evaluate(cg_op) - - # Validate updated params - norm0 = self.evaluate(norm0) - self.assertAllCloseAccordingToType([ - [1.0 * learning_rate - (1-learning_rate)*lamda*grads0_0/norm0, - 2.0 * learning_rate - (1-learning_rate)*lamda*grads0_1/norm0] - ], self.evaluate(var0)) - - - @test_util.run_in_graph_and_eager_modes(reset_test=True) - def testMinimizeWith2DIndiciesForEmbeddingLookup(self): - # This test invokes the ResourceSparseApplyConditionalGradient operation, which - # did not have a registered GPU kernel as of April 2018. With graph - # execution, the placement algorithm notices this and automatically - # places the variable in CPU (host) memory. With eager execution, - # the variable would be placed in GPU memory if available, which - # would then conflict with the future invocation of the - # ResourceSparseApplyConditionalGradient operation. - # To work around this discrepancy, for now we force the variable - # to be placed on CPU. - with ops.device("/cpu:0"): - var0 = resource_variable_ops.ResourceVariable(array_ops.ones([2, 2])) - - def loss(): - return math_ops.reduce_sum(embedding_ops.embedding_lookup(var0, [[1]])) - - # the gradient for this loss function: - grads0 = constant_op.constant([[0, 0], [1, 1]], dtype=dtypes.float32) - norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 - - learning_rate = 0.1 - lamda = 0.1 - opt = cg_lib.ConditionalGradientOptimizer(learning_rate=learning_rate, \ - lamda=lamda) - cg_op = opt.minimize(loss) - self.evaluate(variables.global_variables_initializer()) - - # Run 1 step of cg_op - self.evaluate(cg_op) - norm0 = self.evaluate(norm0) - ''' - This is the test case we need to pass, if we only want to update the - sparse dimension's entry of the var. 
- ''' - self.assertAllCloseAccordingToType([ - [1, - 1], - [learning_rate * 1 - (1-learning_rate)*lamda*1/norm0, - learning_rate * 1 - (1-learning_rate)*lamda*1/norm0] - ], self.evaluate(var0)) - - @test_util.run_deprecated_v1 - def testTensorLearningRateAndConditionalGradient(self): - for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: - with self.cached_session(): - var0 = variables.Variable([1.0, 2.0], dtype=dtype) - var1 = variables.Variable([3.0, 4.0], dtype=dtype) - grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) - grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) - norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 - norm1 = math_ops.reduce_sum(grads1 ** 2) ** 0.5 - cg_opt = cg_lib.ConditionalGradientOptimizer( - learning_rate=constant_op.constant(0.5), - lamda=constant_op.constant(0.01)) - cg_update = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) - variables.global_variables_initializer().run() - # Check we have slots - self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) - slot0 = cg_opt.get_slot(var0, "conditional_gradient") - self.assertEquals(slot0.get_shape(), var0.get_shape()) - slot1 = cg_opt.get_slot(var1, "conditional_gradient") - self.assertEquals(slot1.get_shape(), var1.get_shape()) - if not context.executing_eagerly(): - self.assertFalse(slot0 in variables.trainable_variables()) - self.assertFalse(slot1 in variables.trainable_variables()) - - # Fetch params to validate initial values - self.assertAllClose([1.0, 2.0], self.evaluate(var0)) - self.assertAllClose([3.0, 4.0], self.evaluate(var1)) - - cg_update.run() - # Check that the parameters have been updated. - norm0 = self.evaluate(norm0) - norm1 = self.evaluate(norm1) - self.assertAllCloseAccordingToType( - np.array([ - 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, - 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array([ - 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, - 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) - # Step 2: the conditional_gradient contain the previous update. - cg_update.run() - # Check that the parameters have been updated. - self.assertAllCloseAccordingToType( - np.array([ - (1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.1 / norm0, - (2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array([ - (3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.01 / norm1, - (4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ - - (1 - 0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) - - def _dbParamsCG01(self): - """Return dist-belief conditional_gradient values. - - Return values been generated from the dist-belief conditional_gradient - unittest, running with a learning rate of 0.1 and a lamda of 0.1. - - These values record how a parameter vector of size 10, initialized with 0.0, - gets updated with 10 consecutive conditional_gradient steps. - It uses random gradients. - - Returns: - db_grad: The gradients to apply - db_out: The parameters after the conditional_gradient update. 
- """ - db_grad = [[]] * 10 - db_out = [[]] * 10 - # pylint: disable=line-too-long - db_grad[0] = [ - 0.00096264342, 0.17914793, 0.93945462, 0.41396621, 0.53037018, - 0.93197989, 0.78648776, 0.50036013, 0.55345792, 0.96722615 - ] - db_out[0] = [ - -4.1555551e-05, -7.7334875e-03, -4.0554531e-02, -1.7870162e-02, - -2.2895107e-02, -4.0231861e-02, -3.3951234e-02, -2.1599628e-02, - -2.3891762e-02, -4.1753378e-02 - ] - db_grad[1] = [ - 0.17075552, 0.88821375, 0.20873757, 0.25236958, 0.57578111, 0.15312378, - 0.5513742, 0.94687688, 0.16012503, 0.22159521 - ] - db_out[1] = [ - -0.00961733, -0.0507779, -0.01580694, -0.01599489, -0.03470477, -0.01264373, - -0.03443632, -0.05546713, -0.01140388, -0.01665068 - ] - db_grad[2] = [ - 0.35077485, 0.47304362, 0.44412705, 0.44368884, 0.078527533, 0.81223965, - 0.31168157, 0.43203235, 0.16792089, 0.24644311 - ] - db_out[2] = [ - -0.02462724, -0.03699233, -0.03154434, -0.03153357, -0.00876844, -0.05606323, - -0.02447166, -0.03469437, -0.0124694, -0.01829169 - ] - db_grad[3] = [ - 0.9694621, 0.75035888, 0.28171822, 0.83813518, 0.53807181, 0.3728098, - 0.81454384, 0.03848977, 0.89759839, 0.93665648 - ] - db_out[3] = [ - -0.04124615, -0.03371741, -0.0144246, -0.03668303, -0.02240246, -0.02052062, - -0.03503307, -0.00500922, -0.03715545, -0.0393002 - ] - db_grad[4] = [ - 0.38578293, 0.8536852, 0.88722926, 0.66276771, 0.13678469, 0.94036359, - 0.69107032, 0.81897682, 0.5433259, 0.67860287 - ] - db_out[4] = [ - -0.01979208, -0.0380417, -0.03747472, -0.0305847, -0.00779536, -0.04024222, - -0.03156913, -0.0337613, -0.02578116, -0.03148952 - ] - db_grad[5] = [ - 0.27885768, 0.76100707, 0.24625534, 0.81354135, 0.18959245, 0.48038563, - 0.84163809, 0.41172323, 0.83259648, 0.44941229 - ] - db_out[5] = [ - -0.01555188, -0.04084422, -0.01573331, -0.04265549, -0.01000746, -0.02740575, - -0.04412147, -0.02341569, -0.0431026, -0.02502293 - ] - db_grad[6] = [ - 0.27233034, 0.056316052, 0.5039115, 0.24105175, 0.35697976, 0.75913221, - 0.73577434, 0.16014607, 0.57500273, 0.071136251 - ] - db_out[6] = [ - -0.01890448, -0.00767214, -0.03367592, -0.01962219, -0.02374279, -0.05110247, - -0.05128598, -0.01254396, -0.04094185, -0.00703416 - ] - db_grad[7] = [ - 0.58697265, 0.2494842, 0.08106143, 0.39954534, 0.15892942, 0.12683646, - 0.74053431, 0.16033, 0.66625422, 0.73515922 - ] - db_out[7] = [ - -0.03772914, -0.01599993, -0.00831695, -0.02635719, -0.01207801, -0.01285448, - -0.05034328, -0.01104364, -0.04477356, -0.04558991 - ] - db_grad[8] = [ - 0.8215279, 0.41994119, 0.95172721, 0.68000203, 0.79439718, 0.43384039, - 0.55561525, 0.22567581, 0.93331909, 0.29438227 - ] - db_out[8] = [ - -0.03919835, -0.01970845, -0.04187151, -0.03195836, -0.03546333, -0.01999326, - -0.02899324, -0.01083582, -0.04472339, -0.01725317 - ] - db_grad[9] = [ - 0.68297005, 0.67758518, 0.1748755, 0.13266537, 0.70697063, 0.055731893, - 0.68593478, 0.50580865, 0.12602448, 0.093537711 - ] - db_out[9] = [ - -0.04510314, -0.04282944, -0.0147322, -0.0111956, -0.04617687, -0.00535998, - -0.0442614, -0.03158399, -0.01207165, -0.00736567 - ] - # pylint: enable=line-too-long - return db_grad, db_out - - @test_util.run_deprecated_v1 - def testLikeDistBeliefCG01(self): - with self.cached_session(): - db_grad, db_out = self._dbParamsCG01() - num_samples = len(db_grad) - var0 = variables.Variable([0.0] * num_samples) - grads0 = constant_op.constant([0.0] * num_samples) - cg_opt = cg_lib.ConditionalGradientOptimizer(learning_rate=0.1, lamda=0.1) - cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) - 
variables.global_variables_initializer().run() - for i in xrange(num_samples): - cg_update.run(feed_dict={grads0: db_grad[i]}) - self.assertAllClose(np.array(db_out[i]), self.evaluate(var0)) - - - @test_util.run_deprecated_v1 - def testSparse(self): - for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: - with self.cached_session(): - var0 = variables.Variable(array_ops.zeros([4, 2], dtype=dtype)) - var1 = variables.Variable(constant_op.constant(1.0, dtype, [4, 2])) - grads0 = ops.IndexedSlices( - constant_op.constant( - [[.1, .1]], dtype=dtype), - constant_op.constant([1]), - constant_op.constant([4, 2])) - grads1 = ops.IndexedSlices( - constant_op.constant( - [[.01, .01], [.01, .01]], dtype=dtype), - constant_op.constant([2, 3]), - constant_op.constant([4, 2])) - norm0 = math_ops.reduce_sum(math_ops.multiply(grads0, grads0)) ** 0.5 - norm1 = math_ops.reduce_sum(math_ops.multiply(grads1, grads1)) ** 0.5 - learning_rate = 0.1 - lamda = 0.1 - cg_opt = cg_lib.ConditionalGradientOptimizer( - learning_rate=learning_rate, lamda=lamda) - cg_update = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) - variables.global_variables_initializer().run() - # Check we have slots - self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) - slot0 = cg_opt.get_slot(var0, "conditional_gradient") - self.assertEquals(slot0.get_shape(), var0.get_shape()) - slot1 = cg_opt.get_slot(var1, "conditional_gradient") - self.assertEquals(slot1.get_shape(), var1.get_shape()) - if not context.executing_eagerly(): - self.assertFalse(slot0 in variables.trainable_variables()) - self.assertFalse(slot1 in variables.trainable_variables()) - - # Fetch params to validate initial values - self.assertAllClose([0, 0], self.evaluate(var0)[0]) - self.assertAllClose([0, 0], self.evaluate(var0)[1]) - self.assertAllClose([1, 1], self.evaluate(var1)[2]) - - # Step 1: - cg_update.run() - # Check that the parameters have been updated. - norm0 = self.evaluate(norm0) - norm1 = self.evaluate(norm1) - self.assertAllCloseAccordingToType( - np.array([0 - (1 - learning_rate) * lamda * 0 / norm0, - 0 - (1 - learning_rate) * lamda * 0 / norm0]), - self.evaluate(var0)[0]) - self.assertAllCloseAccordingToType( - np.array([0 - (1 - learning_rate) * lamda * 0.1 / norm0, - 0 - (1 - learning_rate) * lamda * 0.1 / norm0]), - self.evaluate(var0)[1]) - self.assertAllCloseAccordingToType( - np.array([1.0 * learning_rate- (1 - learning_rate) \ - * lamda * 0.01 / norm1, - 1.0 * learning_rate- (1 - learning_rate) \ - * lamda * 0.01 / norm1]), - self.evaluate(var1)[2]) - # Step 2: the conditional_gradient contain the previous update. - cg_update.run() - # Check that the parameters have been updated. 
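
(Reference note, not part of the patch: the recorded dist-belief values above can be reproduced with a few lines of NumPy. `cg_step` below is an illustrative helper that spells out the update rule the tests assert, w <- lr*w - (1-lr)*lamda*g/||g||_F, with lr = lamda = 0.1 and w starting at zeros.)

```
import numpy as np

def cg_step(w, g, lr, lam):
    # One conditional-gradient step as asserted throughout these tests:
    # w <- lr * w - (1 - lr) * lam * g / ||g||_F
    return lr * w - (1 - lr) * lam * g / np.sqrt(np.sum(g ** 2))

lr, lam = 0.1, 0.1
w = np.zeros(10)
g0 = np.array([0.00096264342, 0.17914793, 0.93945462, 0.41396621, 0.53037018,
               0.93197989, 0.78648776, 0.50036013, 0.55345792, 0.96722615])
w = cg_step(w, g0, lr, lam)
print(w[:2])  # ~[-4.1556e-05, -7.7335e-03], matching db_out[0]
```

(Feeding db_grad[1] through db_grad[9] into the same call reproduces the remaining db_out rows, which is exactly what testLikeDistBeliefCG01 checks.)
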
- self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) - self.assertAllCloseAccordingToType( - np.array([ - (0 - (1 - learning_rate) * lamda * 0.1 / norm0) \ - * learning_rate \ - - (1 - learning_rate) * lamda * 0.1 / norm0, - (0 - (1 - learning_rate) * lamda * 0.1 / norm0) \ - * learning_rate \ - - (1 - learning_rate) * lamda * 0.1 / norm0]), - self.evaluate(var0)[1]) - self.assertAllCloseAccordingToType( - np.array([ - (1.0 * learning_rate - \ - (1 - learning_rate) * lamda * 0.01 / norm1) \ - * learning_rate - (1 - learning_rate) \ - * lamda * 0.01 / norm1, - (1.0 * learning_rate- \ - (1 - learning_rate) * lamda * 0.01 / norm1) \ - * learning_rate - (1 - learning_rate) \ - * lamda * 0.01 / norm1]), - self.evaluate(var1)[2]) - - - @test_util.run_deprecated_v1 - def testSharing(self): - for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: - with self.cached_session(): - var0 = variables.Variable([1.0, 2.0], dtype=dtype) - var1 = variables.Variable([3.0, 4.0], dtype=dtype) - grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) - grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) - norm0 = math_ops.reduce_sum(grads0 ** 2) ** 0.5 - norm1 = math_ops.reduce_sum(grads1 ** 2) ** 0.5 learning_rate = 0.1 lamda = 0.1 - cg_opt = cg_lib.ConditionalGradientOptimizer( - learning_rate=learning_rate, lamda=lamda) - cg_update1 = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) - cg_update2 = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) - variables.global_variables_initializer().run() - # Check we have slots - self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) - slot0 = cg_opt.get_slot(var0, "conditional_gradient") - self.assertEquals(slot0.get_shape(), var0.get_shape()) - slot1 = cg_opt.get_slot(var1, "conditional_gradient") - self.assertEquals(slot1.get_shape(), var1.get_shape()) - if not context.executing_eagerly(): - self.assertFalse(slot0 in variables.trainable_variables()) - self.assertFalse(slot1 in variables.trainable_variables()) - # Fetch params to validate initial values - self.assertAllClose([1.0, 2.0], self.evaluate(var0)) - self.assertAllClose([3.0, 4.0], self.evaluate(var1)) - - cg_update1.run() - # Check that the parameters have been updated. + opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, + lamda=lamda) + cg_op = opt.minimize(loss, var_list=[var0]) + self.evaluate(tf.compat.v1.global_variables_initializer()) + + # Run 1 step of cg_op + self.evaluate(cg_op) norm0 = self.evaluate(norm0) - norm1 = self.evaluate(norm1) - self.assertAllCloseAccordingToType( - np.array( - [1.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.1 / norm0, - 2.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.1 / norm0]), - self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array( - [3.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.01 / norm1, - 4.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.01 / norm1]), - self.evaluate(var1)) - - # Step 2: the second conditional_gradient contain the previous update. - cg_update2.run() - - # Check that the parameters have been updated. 
- self.assertAllCloseAccordingToType( - np.array([ - (1.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.1 / norm0) * learning_rate \ - - (1 - learning_rate) * lamda * 0.1 / norm0, - (2.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.1 / norm0) * learning_rate \ - - (1 - learning_rate) * lamda * 0.1 / norm0 - ]), self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array([ - (3.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.01 / norm1) * learning_rate \ - - (1 - learning_rate) * lamda * 0.01 / norm1, - (4.0 * learning_rate - (1-learning_rate) \ - * lamda * 0.01 / norm1) * learning_rate \ - - (1 - learning_rate) * lamda * 0.01 / norm1 - ]), self.evaluate(var1)) + ''' + #If we have to update all the sparse dimension's entry of var, + this is the test case we have to pass. + + self.assertAllCloseAccordingToType([ + [learning_rate * 1 - (1-learning_rate)*lamda*0/norm0, + learning_rate * 1 - (1-learning_rate)*lamda*0/norm0], + [learning_rate * 1 - (1-learning_rate)*lamda*1/norm0, + learning_rate * 1 - (1-learning_rate)*lamda*1/norm0] + ],self.evaluate(var0)) + ''' + ''' + This is the test case we need to pass, if we only want to + update the sparse dimension's entry of the var. + ''' + self.assertAllCloseAccordingToType([ + [1, + 1], + [learning_rate * 1 - (1-learning_rate)*lamda*1/norm0, + learning_rate * 1 - (1-learning_rate)*lamda*1/norm0] + ], self.evaluate(var0)) + + @test_utils.run_deprecated_v1 + def testTensorLearningRateAndConditionalGradient(self): + for dtype in [tf.half, tf.float32, tf.float64]: + with self.cached_session(): + var0 = tf.Variable([1.0, 2.0], dtype=dtype) + var1 = tf.Variable([3.0, 4.0], dtype=dtype) + grads0 = tf.constant([0.1, 0.1], dtype=dtype) + grads1 = tf.constant([0.01, 0.01], dtype=dtype) + norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 + norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=tf.constant(0.5), + lamda=tf.constant(0.01)) + cg_update = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + self.evaluate(tf.compat.v1.global_variables_initializer()) + + # Check we have slots + self.assertEqual(["conditional_gradient"], + cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + ''' + if not tf.executing_eagerly(): + self.assertFalse(slot0 in tf.trainable_variables()) + self.assertFalse(slot1 in tf.trainable_variables()) + ''' + + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) + + cg_update.run() + # Check that the parameters have been updated. + norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([ + 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + # Step 2: the conditional_gradient contain the + # previous update. + cg_update.run() + # Check that the parameters have been updated. 
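
(Reference note, not part of the patch: the assertion above encodes the scatter-style sparse behaviour, where only the rows referenced by the embedding lookup are rewritten and every other row keeps its previous value. A minimal NumPy sketch of that expectation, using the same lr = lamda = 0.1 and the single looked-up row with gradient [1, 1]:)

```
import numpy as np

lr, lam = 0.1, 0.1
var0 = np.ones((2, 2))
grad_rows = np.array([[1.0, 1.0]])   # gradient exists only for the looked-up row
indices = np.array([1])
norm = np.sqrt(np.sum(grad_rows ** 2))   # sqrt(2), same as norm0 above

# Only the rows named by `indices` are rewritten; row 0 keeps its old value of [1, 1].
var0[indices] = lr * var0[indices] - (1 - lr) * lam * grad_rows / norm
print(var0)
# [[1.        1.       ]
#  [0.0363604 0.0363604]]  -> the values the assertion above expects
```
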
+ self.assertAllCloseAccordingToType( + np.array([ + (1.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0, + (2.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + (3.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1, + (4.0 * 0.5 - \ + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + - (1 - 0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) + + def _dbParamsCG01(self): + """Return dist-belief conditional_gradient values. + + Return values been generated from the dist-belief + conditional_gradient unittest, running with a learning rate of 0.1 + and a lamda of 0.1. + + These values record how a parameter vector of size 10, initialized + with 0.0, gets updated with 10 consecutive conditional_gradient + steps. + It uses random gradients. + + Returns: + db_grad: The gradients to apply + db_out: The parameters after the conditional_gradient update. + """ + db_grad = [[]] * 10 + db_out = [[]] * 10 + # pylint: disable=line-too-long + db_grad[0] = [ + 0.00096264342, 0.17914793, 0.93945462, 0.41396621, 0.53037018, + 0.93197989, 0.78648776, 0.50036013, 0.55345792, 0.96722615 + ] + db_out[0] = [ + -4.1555551e-05, -7.7334875e-03, -4.0554531e-02, -1.7870162e-02, + -2.2895107e-02, -4.0231861e-02, -3.3951234e-02, -2.1599628e-02, + -2.3891762e-02, -4.1753378e-02 + ] + db_grad[1] = [ + 0.17075552, 0.88821375, 0.20873757, 0.25236958, 0.57578111, + 0.15312378, 0.5513742, 0.94687688, 0.16012503, 0.22159521 + ] + db_out[1] = [ + -0.00961733, -0.0507779, -0.01580694, -0.01599489, -0.03470477, + -0.01264373, -0.03443632, -0.05546713, -0.01140388, -0.01665068 + ] + db_grad[2] = [ + 0.35077485, 0.47304362, 0.44412705, 0.44368884, 0.078527533, + 0.81223965, 0.31168157, 0.43203235, 0.16792089, 0.24644311 + ] + db_out[2] = [ + -0.02462724, -0.03699233, -0.03154434, -0.03153357, -0.00876844, + -0.05606323, -0.02447166, -0.03469437, -0.0124694, -0.01829169 + ] + db_grad[3] = [ + 0.9694621, 0.75035888, 0.28171822, 0.83813518, 0.53807181, + 0.3728098, 0.81454384, 0.03848977, 0.89759839, 0.93665648 + ] + db_out[3] = [ + -0.04124615, -0.03371741, -0.0144246, -0.03668303, -0.02240246, + -0.02052062, -0.03503307, -0.00500922, -0.03715545, -0.0393002 + ] + db_grad[4] = [ + 0.38578293, 0.8536852, 0.88722926, 0.66276771, 0.13678469, + 0.94036359, 0.69107032, 0.81897682, 0.5433259, 0.67860287 + ] + db_out[4] = [ + -0.01979208, -0.0380417, -0.03747472, -0.0305847, -0.00779536, + -0.04024222, -0.03156913, -0.0337613, -0.02578116, -0.03148952 + ] + db_grad[5] = [ + 0.27885768, 0.76100707, 0.24625534, 0.81354135, 0.18959245, + 0.48038563, 0.84163809, 0.41172323, 0.83259648, 0.44941229 + ] + db_out[5] = [ + -0.01555188, -0.04084422, -0.01573331, -0.04265549, -0.01000746, + -0.02740575, -0.04412147, -0.02341569, -0.0431026, -0.02502293 + ] + db_grad[6] = [ + 0.27233034, 0.056316052, 0.5039115, 0.24105175, 0.35697976, + 0.75913221, 0.73577434, 0.16014607, 0.57500273, 0.071136251 + ] + db_out[6] = [ + -0.01890448, -0.00767214, -0.03367592, -0.01962219, -0.02374279, + -0.05110247, -0.05128598, -0.01254396, -0.04094185, -0.00703416 + ] + db_grad[7] = [ + 0.58697265, 0.2494842, 0.08106143, 0.39954534, 0.15892942, + 0.12683646, 0.74053431, 0.16033, 0.66625422, 0.73515922 + ] + db_out[7] = [ + -0.03772914, -0.01599993, -0.00831695, -0.02635719, -0.01207801, + -0.01285448, -0.05034328, -0.01104364, -0.04477356, -0.04558991 + ] + db_grad[8] = [ + 
0.8215279, 0.41994119, 0.95172721, 0.68000203, 0.79439718, + 0.43384039, 0.55561525, 0.22567581, 0.93331909, 0.29438227 + ] + db_out[8] = [ + -0.03919835, -0.01970845, -0.04187151, -0.03195836, -0.03546333, + -0.01999326, -0.02899324, -0.01083582, -0.04472339, -0.01725317 + ] + db_grad[9] = [ + 0.68297005, 0.67758518, 0.1748755, 0.13266537, 0.70697063, + 0.055731893, 0.68593478, 0.50580865, 0.12602448, 0.093537711 + ] + db_out[9] = [ + -0.04510314, -0.04282944, -0.0147322, -0.0111956, -0.04617687, + -0.00535998, -0.0442614, -0.03158399, -0.01207165, -0.00736567 + ] + # pylint: enable=line-too-long + return db_grad, db_out + + @test_utils.run_deprecated_v1 + def testLikeDistBeliefCG01(self): + with self.cached_session(): + db_grad, db_out = self._dbParamsCG01() + num_samples = len(db_grad) + var0 = tf.Variable([0.0] * num_samples) + grads0 = tf.constant([0.0] * num_samples) + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=0.1, lamda=0.1) + cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) + self.evaluate(tf.compat.v1.global_variables_initializer()) + for i in xrange(num_samples): + cg_update.run(feed_dict={grads0: db_grad[i]}) + self.assertAllClose( + np.array(db_out[i]), self.evaluate(var0)) + + + @test_utils.run_deprecated_v1 + def testSparse(self): + for dtype in [tf.half, tf.float32, tf.float64]: + with self.cached_session(): + var0 = tf.Variable(tf.zeros([4, 2], dtype=dtype)) + var1 = tf.Variable(tf.constant(1.0, dtype, [4, 2])) + grads0 = tf.IndexedSlices( + tf.constant( + [[.1, .1]], dtype=dtype), + tf.constant([1]), + tf.constant([4, 2])) + grads1 = tf.IndexedSlices( + tf.constant( + [[.01, .01], [.01, .01]], dtype=dtype), + tf.constant([2, 3]), + tf.constant([4, 2])) + norm0 = tf.math.reduce_sum( + tf.math.multiply(grads0, grads0)) ** 0.5 + norm1 = tf.math.reduce_sum( + tf.math.multiply(grads1, grads1)) ** 0.5 + learning_rate = 0.1 + lamda = 0.1 + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, lamda=lamda) + cg_update = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + self.evaluate(tf.compat.v1.global_variables_initializer()) + + # Check we have slots + self.assertEqual(["conditional_gradient"], + cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + ''' + if not tf.executing_eagerly(): + self.assertFalse(slot0 in tf.trainable_variables()) + self.assertFalse(slot1 in tf.trainable_variables()) + ''' + # Fetch params to validate initial values + self.assertAllClose([0, 0], self.evaluate(var0)[0]) + self.assertAllClose([0, 0], self.evaluate(var0)[1]) + self.assertAllClose([1, 1], self.evaluate(var1)[2]) + + # Step 1: + cg_update.run() + # Check that the parameters have been updated. 
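
(Reference note, not part of the patch: in testSparse the gradients are IndexedSlices whose dense form is zero outside the listed rows, so the Frobenius norm reduces to the norm of the slice values, norm0 = sqrt(2) * 0.1 and norm1 = 2 * 0.01. A quick NumPy check of those norms and of the step-1 rows the assertions expect:)

```
import numpy as np

lr, lam = 0.1, 0.1
g0 = np.array([[0.1, 0.1]])                  # slice values for var0 (row 1)
g1 = np.array([[0.01, 0.01], [0.01, 0.01]])  # slice values for var1 (rows 2 and 3)
norm0 = np.sqrt(np.sum(g0 ** 2))   # ~0.14142
norm1 = np.sqrt(np.sum(g1 ** 2))   # 0.02

print(0.0 * lr - (1 - lr) * lam * 0.1 / norm0)   # ~-0.06364, expected var0[1] after step 1
print(1.0 * lr - (1 - lr) * lam * 0.01 / norm1)  # 0.055, expected var1[2] after step 1
```
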
+ norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([0 - (1 - learning_rate) \ + * lamda * 0 / norm0, + 0 - (1 - learning_rate) * lamda * 0 \ + / norm0]), + self.evaluate(var0)[0]) + self.assertAllCloseAccordingToType( + np.array([0 - (1 - learning_rate) \ + * lamda * 0.1 / norm0, + 0 - (1 - learning_rate) * lamda * 0.1\ + / norm0]), + self.evaluate(var0)[1]) + self.assertAllCloseAccordingToType( + np.array([1.0 * learning_rate - \ + (1 - learning_rate) * lamda * 0.01 \ + / norm1, + 1.0 * learning_rate - \ + (1 - learning_rate) * lamda * 0.01 \ + / norm1]), + self.evaluate(var1)[2]) + # Step 2: the conditional_gradient contain the + # previous update. + cg_update.run() + # Check that the parameters have been updated. + self.assertAllClose(np.array([0, 0]), + self.evaluate(var0)[0]) + self.assertAllCloseAccordingToType( + np.array([ + (0 - (1 - learning_rate) \ + * lamda * 0.1 / norm0) \ + * learning_rate \ + - (1 - learning_rate) * lamda * 0.1\ + / norm0, + (0 - (1 - learning_rate) \ + * lamda * 0.1 / norm0) \ + * learning_rate \ + - (1 - learning_rate) \ + * lamda * 0.1 / norm0]), + self.evaluate(var0)[1]) + self.assertAllCloseAccordingToType( + np.array([ + (1.0 * learning_rate - \ + (1 - learning_rate) * lamda * 0.01 \ + / norm1) * learning_rate \ + - (1 - learning_rate) \ + * lamda * 0.01 / norm1, + (1.0 * learning_rate- \ + (1 - learning_rate) * lamda * 0.01 \ + / norm1) \ + * learning_rate - \ + (1 - learning_rate) \ + * lamda * 0.01 / norm1]), + self.evaluate(var1)[2]) + + + @test_utils.run_deprecated_v1 + def testSharing(self): + for dtype in [tf.half, tf.float32, tf.float64]: + with self.cached_session(): + var0 = tf.Variable([1.0, 2.0], dtype=dtype) + var1 = tf.Variable([3.0, 4.0], dtype=dtype) + grads0 = tf.constant([0.1, 0.1], dtype=dtype) + grads1 = tf.constant([0.01, 0.01], dtype=dtype) + norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 + norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 + learning_rate = 0.1 + lamda = 0.1 + cg_opt = cg_lib.ConditionalGradientOptimizer( + learning_rate=learning_rate, lamda=lamda) + cg_update1 = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + cg_update2 = cg_opt.apply_gradients( + zip([grads0, grads1], [var0, var1])) + self.evaluate(tf.compat.v1.global_variables_initializer()) + + # Check we have slots + self.assertEqual(["conditional_gradient"], + cg_opt.get_slot_names()) + slot0 = cg_opt.get_slot(var0, "conditional_gradient") + self.assertEquals(slot0.get_shape(), var0.get_shape()) + slot1 = cg_opt.get_slot(var1, "conditional_gradient") + self.assertEquals(slot1.get_shape(), var1.get_shape()) + ''' + if not tf.executing_eagerly(): + self.assertFalse(slot0 in tf.trainable_variables()) + self.assertFalse(slot1 in tf.trainable_variables()) + ''' + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) + + cg_update1.run() + # Check that the parameters have been updated. 
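
(Reference note, not part of the patch: the step-2 expectations in these tests, including the testSharing case where two apply_gradients ops share one optimizer, are simply the one-step rule applied twice with the same gradient. The nested expression used in the assertions equals iterating the update, as this small sketch shows for var0 with lr = lamda = 0.1:)

```
import numpy as np

lr, lam = 0.1, 0.1
g = np.array([0.1, 0.1])
norm = np.sqrt(np.sum(g ** 2))
w = np.array([1.0, 2.0])

for _ in range(2):                       # cg_update1 followed by cg_update2
    w = lr * w - (1 - lr) * lam * g / norm

nested = (1.0 * lr - (1 - lr) * lam * 0.1 / norm) * lr - (1 - lr) * lam * 0.1 / norm
print(w[0], nested)                      # both ~-0.060004
```
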
+ norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array( + [1.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.1 / norm0, + 2.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.1 / norm0]), + self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array( + [3.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.01 / norm1, + 4.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.01 / norm1]), + self.evaluate(var1)) + + # Step 2: the second conditional_gradient contain + # the previous update. + cg_update2.run() + + # Check that the parameters have been updated. + self.assertAllCloseAccordingToType( + np.array([ + (1.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.1 / norm0) \ + * learning_rate \ + - (1 - learning_rate) * lamda \ + * 0.1 / norm0, + (2.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.1 / norm0) \ + * learning_rate \ + - (1 - learning_rate) * lamda \ + * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + (3.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.01 / norm1) \ + * learning_rate \ + - (1 - learning_rate) * lamda \ + * 0.01 / norm1, + (4.0 * learning_rate \ + - (1-learning_rate) \ + * lamda * 0.01 / norm1) \ + * learning_rate \ + - (1 - learning_rate) \ + * lamda * 0.01 / norm1 \ + ]), self.evaluate(var1)) if __name__ == "__main__": - test.main() + tf.test.main() From 802c313b5a2b05758fca92c634ddefddc0da1a55 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sat, 31 Aug 2019 15:39:50 -0500 Subject: [PATCH 03/20] Add files via upload --- tensorflow_addons/optimizers/__init__.py | 3 +-- .../optimizers/conditional_gradient.py | 1 - .../optimizers/conditional_gradient_test.py | 20 ------------------- 3 files changed, 1 insertion(+), 23 deletions(-) diff --git a/tensorflow_addons/optimizers/__init__.py b/tensorflow_addons/optimizers/__init__.py index ec1341f212..d8b284b17e 100644 --- a/tensorflow_addons/optimizers/__init__.py +++ b/tensorflow_addons/optimizers/__init__.py @@ -24,5 +24,4 @@ from tensorflow_addons.optimizers.weight_decay_optimizers import SGDW from tensorflow_addons.optimizers.weight_decay_optimizers import ( extend_with_decoupled_weight_decay) -from conditional_gradient import ConditionalGradientOptimizer -#from tensorflow_addons.optimizers.conditional_gradient import ConditionalGradientOptimizer +from tensorflow_addons.optimizers.conditional_gradient import ConditionalGradientOptimizer diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 7808e37f64..4a96ef89de 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -22,7 +22,6 @@ from tensorflow_addons.utils import keras_utils @keras_utils.register_keras_custom_object - #class ConditionalGradientOptimizer(tf.keras.optimizer_v2.OptimizerV2): class ConditionalGradientOptimizer(tf.keras.optimizers.Optimizer): """Optimizer that implements the Conditional Gradient optimization. Helps handle constraints well. 
diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 5cfc119964..8650900edd 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -246,21 +246,6 @@ def loss(): # Run 1 step of cg_op self.evaluate(cg_op) norm0 = self.evaluate(norm0) - ''' - #If we have to update all the sparse dimension's entry of var, - this is the test case we have to pass. - - self.assertAllCloseAccordingToType([ - [learning_rate * 1 - (1-learning_rate)*lamda*0/norm0, - learning_rate * 1 - (1-learning_rate)*lamda*0/norm0], - [learning_rate * 1 - (1-learning_rate)*lamda*1/norm0, - learning_rate * 1 - (1-learning_rate)*lamda*1/norm0] - ],self.evaluate(var0)) - ''' - ''' - This is the test case we need to pass, if we only want to - update the sparse dimension's entry of the var. - ''' self.assertAllCloseAccordingToType([ [1, 1], @@ -292,11 +277,6 @@ def testTensorLearningRateAndConditionalGradient(self): self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) - ''' - if not tf.executing_eagerly(): - self.assertFalse(slot0 in tf.trainable_variables()) - self.assertFalse(slot1 in tf.trainable_variables()) - ''' # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], self.evaluate(var0)) From ae7dfcd699b072ea62d19abd55d952ea74b0bdfe Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sat, 31 Aug 2019 15:46:20 -0500 Subject: [PATCH 04/20] Add files via upload --- tensorflow_addons/optimizers/README.md | 2 ++ tensorflow_addons/optimizers/conditional_gradient.py | 2 +- tensorflow_addons/optimizers/conditional_gradient_test.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tensorflow_addons/optimizers/README.md b/tensorflow_addons/optimizers/README.md index f45cb5fb1c..6afbe39222 100644 --- a/tensorflow_addons/optimizers/README.md +++ b/tensorflow_addons/optimizers/README.md @@ -6,6 +6,7 @@ | lazy_adam | Saishruthi Swaminathan | saishruthi.tn@gmail.com | | moving_average | Dheeraj R. Reddy | dheeraj98reddy@gmail.com | | weight_decay_optimizers | Phil Jund | ijund.phil@googlemail.com | +| conditional_gradient | Pengyu Kan | pkan2@wisc.edu | ## Components @@ -14,6 +15,7 @@ | lazy_adam | LazyAdam | https://arxiv.org/abs/1412.6980 | | moving_average | MovingAverage | | | weight_decay_optimizers | SGDW, AdamW, extend_with_decoupled_weight_decay | https://arxiv.org/pdf/1711.05101.pdf | +| conditional_gradient | ConditionalGradientOptimizer | https://arxiv.org/pdf/1803.06453.pdf | ## Contribution Guidelines diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 4a96ef89de..6ebc273d1f 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -1,4 +1,4 @@ -# Copyright 2018 Vishnu sai rao suresh Lokhande & Pengyu Kan. All Rights Reserved. +# Copyright 2018 Pengyu Kan & Vishnu sai rao suresh Lokhande. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 8650900edd..905fc45ba7 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 Vishnu sai rao suresh Lokhande & Pengyu Kan. All Rights Reserved. +# Copyright 2019 Pengyu Kan & Vishnu sai rao suresh Lokhande. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 6768daf9c7a29fb4bd5aca652886c75a1c3160f1 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sun, 1 Sep 2019 13:30:10 -0500 Subject: [PATCH 05/20] Add files via upload --- tensorflow_addons/optimizers/BUILD | 28 ++++++++++++------------ tensorflow_addons/optimizers/README.md | 4 ++-- tensorflow_addons/optimizers/__init__.py | 2 +- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tensorflow_addons/optimizers/BUILD b/tensorflow_addons/optimizers/BUILD index bb6a0d0641..ebd61571df 100644 --- a/tensorflow_addons/optimizers/BUILD +++ b/tensorflow_addons/optimizers/BUILD @@ -6,10 +6,10 @@ py_library( name = "optimizers", srcs = [ "__init__.py", + "conditional_gradient.py", "lazy_adam.py", "moving_average.py", - "weight_decay_optimizers.py", - "conditional_gradient.py", + "weight_decay_optimizers.py" ], srcs_version = "PY2AND3", deps = [ @@ -18,12 +18,12 @@ py_library( ) py_test( - name = "lazy_adam_test", + name = "conditional_gradient_test", size = "small", srcs = [ - "lazy_adam_test.py", + "conditional_gradient_test.py", ], - main = "lazy_adam_test.py", + main = "conditional_gradient_test.py", srcs_version = "PY2AND3", deps = [ ":optimizers", @@ -31,12 +31,12 @@ py_test( ) py_test( - name = "moving_average_test", + name = "lazy_adam_test", size = "small", srcs = [ - "moving_average_test.py", + "lazy_adam_test.py", ], - main = "moving_average_test.py", + main = "lazy_adam_test.py", srcs_version = "PY2AND3", deps = [ ":optimizers", @@ -44,12 +44,12 @@ py_test( ) py_test( - name = "weight_decay_optimizers_test", + name = "moving_average_test", size = "small", srcs = [ - "weight_decay_optimizers_test.py", + "moving_average_test.py", ], - main = "weight_decay_optimizers_test.py", + main = "moving_average_test.py", srcs_version = "PY2AND3", deps = [ ":optimizers", @@ -57,12 +57,12 @@ py_test( ) py_test( - name = "conditional_gradient_test", + name = "weight_decay_optimizers_test", size = "small", srcs = [ - "conditional_gradient_test.py", + "weight_decay_optimizers_test.py", ], - main = "conditional_gradient_test.py", + main = "weight_decay_optimizers_test.py", srcs_version = "PY2AND3", deps = [ ":optimizers", diff --git a/tensorflow_addons/optimizers/README.md b/tensorflow_addons/optimizers/README.md index 6afbe39222..8f087fd7eb 100644 --- a/tensorflow_addons/optimizers/README.md +++ b/tensorflow_addons/optimizers/README.md @@ -3,19 +3,19 @@ ## Maintainers | Submodule | Maintainers | Contact Info | |:---------- |:------------- |:--------------| +| conditional_gradient | Pengyu Kan | pkan2@wisc.edu | | lazy_adam | Saishruthi Swaminathan | saishruthi.tn@gmail.com | | moving_average | Dheeraj R. 
Reddy | dheeraj98reddy@gmail.com | | weight_decay_optimizers | Phil Jund | ijund.phil@googlemail.com | -| conditional_gradient | Pengyu Kan | pkan2@wisc.edu | ## Components | Submodule | Optimizer | Reference | |:--------- |:---------- |:---------| +| conditional_gradient | ConditionalGradient | https://arxiv.org/pdf/1803.06453.pdf | | lazy_adam | LazyAdam | https://arxiv.org/abs/1412.6980 | | moving_average | MovingAverage | | | weight_decay_optimizers | SGDW, AdamW, extend_with_decoupled_weight_decay | https://arxiv.org/pdf/1711.05101.pdf | -| conditional_gradient | ConditionalGradientOptimizer | https://arxiv.org/pdf/1803.06453.pdf | ## Contribution Guidelines diff --git a/tensorflow_addons/optimizers/__init__.py b/tensorflow_addons/optimizers/__init__.py index d8b284b17e..142f71c6f5 100644 --- a/tensorflow_addons/optimizers/__init__.py +++ b/tensorflow_addons/optimizers/__init__.py @@ -18,10 +18,10 @@ from __future__ import division from __future__ import print_function +from tensorflow_addons.optimizers.conditional_gradient import ConditionalGradient from tensorflow_addons.optimizers.lazy_adam import LazyAdam from tensorflow_addons.optimizers.moving_average import MovingAverage from tensorflow_addons.optimizers.weight_decay_optimizers import AdamW from tensorflow_addons.optimizers.weight_decay_optimizers import SGDW from tensorflow_addons.optimizers.weight_decay_optimizers import ( extend_with_decoupled_weight_decay) -from tensorflow_addons.optimizers.conditional_gradient import ConditionalGradientOptimizer From aced15519c22990dff8c1b8819eab0222a4eaa7d Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sun, 1 Sep 2019 14:33:45 -0500 Subject: [PATCH 06/20] Add files via upload --- .../optimizers/conditional_gradient.py | 24 +++++++++---------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 6ebc273d1f..86dbfe5a0b 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -1,4 +1,4 @@ -# Copyright 2018 Pengyu Kan & Vishnu sai rao suresh Lokhande. All Rights Reserved. +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,7 +22,8 @@ from tensorflow_addons.utils import keras_utils @keras_utils.register_keras_custom_object -class ConditionalGradientOptimizer(tf.keras.optimizers.Optimizer): + #class ConditionalGradient(tf.keras.optimizer_v2.OptimizerV2): +class ConditionalGradient(tf.keras.optimizers.Optimizer): """Optimizer that implements the Conditional Gradient optimization. Helps handle constraints well. Currently only supports frobenius norm constraint. @@ -44,7 +45,7 @@ def __init__(self, learning_rate, lamda, name: Optional name prefix for the operations created when applying gradients. 
Defaults to "ConditionalGradient" """ - super(ConditionalGradientOptimizer, self).__init__(name=name) + super(ConditionalGradient, self).__init__(name=name) self._set_hyper("learning_rate", learning_rate) self._set_hyper("lamda", lamda) @@ -77,11 +78,10 @@ def frobenius_norm(m): return tf.math.reduce_sum(m ** 2) ** 0.5 norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") norm = tf.dtypes.cast(norm, var.dtype.base_dtype) - lr = tf.dtypes.cast(self._learning_rate_tensor, - var.dtype.base_dtype) + lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) - var_update_tensor = tf.math.multiply(var, lr) \ - - (1-lr)* lamda * grad / norm + var_update_tensor = (tf.math.multiply(var, lr) - + (1 - lr) * lamda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'value': var_update_tensor, @@ -95,17 +95,15 @@ def frobenius_norm(m): return tf.reduce_sum(m ** 2) ** 0.5 norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") norm = tf.dtypes.cast(norm, var.dtype.base_dtype) - lr = tf.dtypes.cast(self._learning_rate_tensor, - var.dtype.base_dtype) + lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) var_slice = tf.gather(var, indices) - var_update_value = tf.math.multiply(var_slice, lr) \ - - (1-lr) * lamda * grad / norm + var_update_value = (tf.math.multiply(var_slice, lr) - + (1 - lr) * lamda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'indices': indices, 'updates': var_update_value } - var_update_op = tf.raw_ops.ResourceScatterUpdate \ - (**var_update_kwargs) + var_update_op = tf.raw_ops.ResourceScatterUpdate(**var_update_kwargs) return tf.group(var_update_op) From b14081a9c97b0b8e0366f9a7025965261adf00a9 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sun, 1 Sep 2019 14:57:08 -0500 Subject: [PATCH 07/20] Add files via upload --- .../optimizers/conditional_gradient_test.py | 195 +++++++++--------- 1 file changed, 100 insertions(+), 95 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 905fc45ba7..5bb658d8b7 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 Pengyu Kan & Vishnu sai rao suresh Lokhande. All Rights Reserved. +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -51,7 +51,7 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): if not use_callable_params: learning_rate = learning_rate() lamda = lamda() - cg_opt = cg_lib.ConditionalGradientOptimizer( + cg_opt = cg_lib.ConditionalGradient( learning_rate=learning_rate, lamda=lamda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -63,7 +63,7 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): self.assertAllClose([3.0, 4.0], self.evaluate(var1)) # Check we have slots - self.assertEqual(["conditional_gradient"], + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) @@ -82,13 +82,13 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( np.array([ - 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, - 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 + 1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ - 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, - 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 + 3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 ]), self.evaluate(var1)) # Step 2: the conditional_gradient contain the previous update. @@ -98,20 +98,20 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): self.evaluate(cg_update) self.assertAllCloseAccordingToType( np.array([ - (1.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + (1.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, - (2.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + (2.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ - (3.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + (3.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, - (4.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + (4.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 ]), self.evaluate(var1)) @@ -130,7 +130,7 @@ def testBasicCallableParams(self): self.doTestBasic(use_resource=True, use_callable_params=True) def testVariablesAcrossGraphs(self): - optimizer = cg_lib.ConditionalGradientOptimizer(0.01, 0.5) + optimizer = cg_lib.ConditionalGradient(0.01, 0.5) with tf.Graph().as_default(): var0 = tf.Variable( [1.0, 2.0], dtype=tf.float32, name="var0") @@ -194,7 +194,7 @@ def loss(): # pylint: enable=cell-var-from-loop learning_rate = 0.1 lamda = 0.1 - opt = cg_lib.ConditionalGradientOptimizer( + opt = cg_lib.ConditionalGradient( learning_rate=learning_rate, lamda=lamda) cg_op = opt.minimize(loss, var_list=[var0]) @@ -206,10 +206,10 @@ def loss(): # Validate updated params norm0 = self.evaluate(norm0) self.assertAllCloseAccordingToType([ - [1.0 * learning_rate - \ - (1-learning_rate)*lamda*grads0_0/norm0, - 2.0 * learning_rate - \ - (1-learning_rate)*lamda*grads0_1/norm0] + [1.0 * learning_rate - + (1 - learning_rate)*lamda*grads0_0/norm0, + 2.0 * learning_rate - + (1 - learning_rate)*lamda*grads0_1/norm0] ], self.evaluate(var0)) @@ -237,7 +237,7 @@ def loss(): learning_rate = 0.1 lamda = 0.1 - opt = cg_lib.ConditionalGradientOptimizer( + opt = cg_lib.ConditionalGradient( learning_rate=learning_rate, lamda=lamda) cg_op = 
opt.minimize(loss, var_list=[var0]) @@ -249,8 +249,8 @@ def loss(): self.assertAllCloseAccordingToType([ [1, 1], - [learning_rate * 1 - (1-learning_rate)*lamda*1/norm0, - learning_rate * 1 - (1-learning_rate)*lamda*1/norm0] + [learning_rate * 1 - (1 - learning_rate)*lamda*1/norm0, + learning_rate * 1 - (1 - learning_rate)*lamda*1/norm0] ], self.evaluate(var0)) @test_utils.run_deprecated_v1 @@ -263,7 +263,7 @@ def testTensorLearningRateAndConditionalGradient(self): grads1 = tf.constant([0.01, 0.01], dtype=dtype) norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 - cg_opt = cg_lib.ConditionalGradientOptimizer( + cg_opt = cg_lib.ConditionalGradient( learning_rate=tf.constant(0.5), lamda=tf.constant(0.01)) cg_update = cg_opt.apply_gradients( @@ -277,6 +277,11 @@ def testTensorLearningRateAndConditionalGradient(self): self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) + ''' + if not tf.executing_eagerly(): + self.assertFalse(slot0 in tf.trainable_variables()) + self.assertFalse(slot1 in tf.trainable_variables()) + ''' # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], self.evaluate(var0)) @@ -288,13 +293,13 @@ def testTensorLearningRateAndConditionalGradient(self): norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( np.array([ - 1.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0, - 2.0 * 0.5 - (1-0.5) * 0.01 * 0.1 / norm0 + 1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ - 3.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1, - 4.0 * 0.5 - (1-0.5) * 0.01 * 0.01 / norm1 + 3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 ]), self.evaluate(var1)) # Step 2: the conditional_gradient contain the # previous update. @@ -302,20 +307,20 @@ def testTensorLearningRateAndConditionalGradient(self): # Check that the parameters have been updated. 
self.assertAllCloseAccordingToType( np.array([ - (1.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + (1.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, - (2.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 \ + (2.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ - (3.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + (3.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, - (4.0 * 0.5 - \ - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 \ + (4.0 * 0.5 - + (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 ]), self.evaluate(var1)) @@ -429,7 +434,7 @@ def testLikeDistBeliefCG01(self): num_samples = len(db_grad) var0 = tf.Variable([0.0] * num_samples) grads0 = tf.constant([0.0] * num_samples) - cg_opt = cg_lib.ConditionalGradientOptimizer( + cg_opt = cg_lib.ConditionalGradient( learning_rate=0.1, lamda=0.1) cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -461,14 +466,14 @@ def testSparse(self): tf.math.multiply(grads1, grads1)) ** 0.5 learning_rate = 0.1 lamda = 0.1 - cg_opt = cg_lib.ConditionalGradientOptimizer( + cg_opt = cg_lib.ConditionalGradient( learning_rate=learning_rate, lamda=lamda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) self.evaluate(tf.compat.v1.global_variables_initializer()) # Check we have slots - self.assertEqual(["conditional_gradient"], + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) @@ -490,23 +495,23 @@ def testSparse(self): norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( - np.array([0 - (1 - learning_rate) \ + np.array([0 - (1 - learning_rate) * lamda * 0 / norm0, - 0 - (1 - learning_rate) * lamda * 0 \ + 0 - (1 - learning_rate) * lamda * 0 / norm0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( - np.array([0 - (1 - learning_rate) \ + np.array([0 - (1 - learning_rate) * lamda * 0.1 / norm0, - 0 - (1 - learning_rate) * lamda * 0.1\ + 0 - (1 - learning_rate) * lamda * 0.1 / norm0]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( - np.array([1.0 * learning_rate - \ - (1 - learning_rate) * lamda * 0.01 \ + np.array([1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1, - 1.0 * learning_rate - \ - (1 - learning_rate) * lamda * 0.01 \ + 1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1]), self.evaluate(var1)[2]) # Step 2: the conditional_gradient contain the @@ -517,29 +522,29 @@ def testSparse(self): self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( np.array([ - (0 - (1 - learning_rate) \ - * lamda * 0.1 / norm0) \ - * learning_rate \ - - (1 - learning_rate) * lamda * 0.1\ + (0 - (1 - learning_rate) + * lamda * 0.1 / norm0) + * learning_rate + - (1 - learning_rate) * lamda * 0.1 / norm0, - (0 - (1 - learning_rate) \ - * lamda * 0.1 / norm0) \ - * learning_rate \ - - (1 - learning_rate) \ + (0 - (1 - learning_rate) + * lamda * 0.1 / norm0) + * learning_rate + - (1 - learning_rate) * lamda * 0.1 / norm0]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([ - (1.0 * learning_rate - \ - (1 - learning_rate) * lamda * 0.01 \ - / norm1) * learning_rate \ - - (1 - learning_rate) \ + (1.0 * learning_rate - + (1 - 
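
(Usage note, not part of the patch: with the class renamed to ConditionalGradient and exported from tensorflow_addons.optimizers as in the __init__.py change above, a minimal end-to-end call mirrors how these tests drive minimize(). This is a sketch under the assumption that the package is built with this patch applied; the variable and loss are made up for illustration.)

```
import tensorflow as tf
import tensorflow_addons as tfa   # assumes a build that already contains this patch

w = tf.Variable([[1.0, 2.0]])
loss = lambda: tf.reduce_sum(w * w)
opt = tfa.optimizers.ConditionalGradient(learning_rate=0.1, lamda=0.1)
opt.minimize(loss, var_list=[w])  # one update under the Frobenius-norm constraint
print(w.numpy())
```
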
learning_rate) * lamda * 0.01 + / norm1) * learning_rate + - (1 - learning_rate) * lamda * 0.01 / norm1, - (1.0 * learning_rate- \ - (1 - learning_rate) * lamda * 0.01 \ - / norm1) \ - * learning_rate - \ - (1 - learning_rate) \ + (1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 + / norm1) + * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1]), self.evaluate(var1)[2]) @@ -556,7 +561,7 @@ def testSharing(self): norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 learning_rate = 0.1 lamda = 0.1 - cg_opt = cg_lib.ConditionalGradientOptimizer( + cg_opt = cg_lib.ConditionalGradient( learning_rate=learning_rate, lamda=lamda) cg_update1 = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -586,20 +591,20 @@ def testSharing(self): norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( np.array( - [1.0 * learning_rate \ - - (1-learning_rate) \ + [1.0 * learning_rate + - (1 - learning_rate) * lamda * 0.1 / norm0, - 2.0 * learning_rate \ - - (1-learning_rate) \ + 2.0 * learning_rate + - (1 - learning_rate) * lamda * 0.1 / norm0]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array( - [3.0 * learning_rate \ - - (1-learning_rate) \ + [3.0 * learning_rate + - (1 - learning_rate) * lamda * 0.01 / norm1, - 4.0 * learning_rate \ - - (1-learning_rate) \ + 4.0 * learning_rate + - (1 - learning_rate) * lamda * 0.01 / norm1]), self.evaluate(var1)) @@ -610,33 +615,33 @@ def testSharing(self): # Check that the parameters have been updated. self.assertAllCloseAccordingToType( np.array([ - (1.0 * learning_rate \ - - (1-learning_rate) \ - * lamda * 0.1 / norm0) \ - * learning_rate \ - - (1 - learning_rate) * lamda \ + (1.0 * learning_rate + - (1 - learning_rate) + * lamda * 0.1 / norm0) + * learning_rate + - (1 - learning_rate) * lamda * 0.1 / norm0, - (2.0 * learning_rate \ - - (1-learning_rate) \ - * lamda * 0.1 / norm0) \ - * learning_rate \ - - (1 - learning_rate) * lamda \ + (2.0 * learning_rate + - (1 - learning_rate) + * lamda * 0.1 / norm0) + * learning_rate + - (1 - learning_rate) * lamda * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ - (3.0 * learning_rate \ - - (1-learning_rate) \ - * lamda * 0.01 / norm1) \ - * learning_rate \ - - (1 - learning_rate) * lamda \ + (3.0 * learning_rate + - (1 - learning_rate) + * lamda * 0.01 / norm1) + * learning_rate + - (1 - learning_rate) * lamda * 0.01 / norm1, - (4.0 * learning_rate \ - - (1-learning_rate) \ - * lamda * 0.01 / norm1) \ - * learning_rate \ - - (1 - learning_rate) \ - * lamda * 0.01 / norm1 \ + (4.0 * learning_rate + - (1 - learning_rate) + * lamda * 0.01 / norm1) + * learning_rate + - (1 - learning_rate) + * lamda * 0.01 / norm1 ]), self.evaluate(var1)) if __name__ == "__main__": From 68dbf641748344510bbf349589d642abcff07c0c Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sun, 1 Sep 2019 15:02:35 -0500 Subject: [PATCH 08/20] Add files via upload --- tensorflow_addons/optimizers/conditional_gradient_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 5bb658d8b7..1fa9274800 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -514,11 +514,11 @@ def testSparse(self): (1 - learning_rate) * lamda * 0.01 / norm1]), self.evaluate(var1)[2]) - # Step 2: the conditional_gradient contain the + # Step 
2: the conditional_gradient contain the # previous update. cg_update.run() # Check that the parameters have been updated. - self.assertAllClose(np.array([0, 0]), + self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( np.array([ @@ -570,7 +570,7 @@ def testSharing(self): self.evaluate(tf.compat.v1.global_variables_initializer()) # Check we have slots - self.assertEqual(["conditional_gradient"], + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) From 932bb7ffc3f83aff61710e3f145502bb87cfbc01 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sun, 1 Sep 2019 22:08:34 -0500 Subject: [PATCH 09/20] Add files via upload --- .../optimizers/conditional_gradient_test.py | 48 +++++++++++-------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 1fa9274800..40d141318a 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -69,11 +69,11 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) - ''' + if not tf.executing_eagerly(): - self.assertFalse(slot0 in tf.trainable_variables()) - self.assertFalse(slot1 in tf.trainable_variables()) - ''' + self.assertFalse(slot0 in tf.compat.v1.trainable_variables()) + self.assertFalse(slot1 in tf.compat.v1.trainable_variables()) + if not tf.executing_eagerly(): self.evaluate(cg_update) @@ -115,7 +115,8 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): - (1 - 0.5) * 0.01 * 0.01 / norm1 ]), self.evaluate(var1)) - + #@test_utils.run_in_graph_and_eager_modes(reset_test=True) + #? def testBasic(self): with self.cached_session(): self.doTestBasic(use_resource=False) @@ -129,6 +130,8 @@ def testBasicCallableParams(self): #with tf.enable_eager_execution(): self.doTestBasic(use_resource=True, use_callable_params=True) + #@test_utils.run_in_graph_and_eager_modes(reset_test=True) + #? def testVariablesAcrossGraphs(self): optimizer = cg_lib.ConditionalGradient(0.01, 0.5) with tf.Graph().as_default(): @@ -166,8 +169,7 @@ def testVariablesAcrossGraphs(self): def testMinimizeSparseResourceVariable(self): for dtype in [tf.half, tf.float32, tf.float64]: # This test invokes the ResourceSparseApplyConditionalGradient - # operation, which did not have a registered GPU kernel as of - # April 2018. + # operation. # With graph execution, the placement algorithm notices this # and automatically places the variable in CPU (host) memory. 
# With eager execution, the variable would be placed in GPU @@ -277,11 +279,13 @@ def testTensorLearningRateAndConditionalGradient(self): self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) - ''' + if not tf.executing_eagerly(): - self.assertFalse(slot0 in tf.trainable_variables()) - self.assertFalse(slot1 in tf.trainable_variables()) - ''' + self.assertFalse(slot0 in + tf.compat.v1.trainable_variables()) + self.assertFalse(slot1 in + tf.compat.v1.trainable_variables()) + # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], self.evaluate(var0)) @@ -479,11 +483,13 @@ def testSparse(self): self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) - ''' + if not tf.executing_eagerly(): - self.assertFalse(slot0 in tf.trainable_variables()) - self.assertFalse(slot1 in tf.trainable_variables()) - ''' + self.assertFalse(slot0 in + tf.compat.v1.trainable_variables()) + self.assertFalse(slot1 in + tf.compat.v1.trainable_variables()) + # Fetch params to validate initial values self.assertAllClose([0, 0], self.evaluate(var0)[0]) self.assertAllClose([0, 0], self.evaluate(var0)[1]) @@ -576,11 +582,13 @@ def testSharing(self): self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) - ''' - if not tf.executing_eagerly(): - self.assertFalse(slot0 in tf.trainable_variables()) - self.assertFalse(slot1 in tf.trainable_variables()) - ''' + + if not tf.executing_eagerly(): + self.assertFalse(slot0 in + tf.compat.v1.trainable_variables()) + self.assertFalse(slot1 in + tf.compat.v1.trainable_variables()) + # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], self.evaluate(var0)) self.assertAllClose([3.0, 4.0], self.evaluate(var1)) From 22d57d8c26656b938b72edf4f5ecf2b953ea4e6b Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Mon, 2 Sep 2019 15:10:16 -0500 Subject: [PATCH 10/20] Add files via upload --- .../optimizers/conditional_gradient.py | 26 +- .../optimizers/conditional_gradient_test.py | 416 ++++++++---------- 2 files changed, 187 insertions(+), 255 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 86dbfe5a0b..34c0bf4e3c 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== - """Conditional Gradient method for TensorFlow.""" from __future__ import absolute_import from __future__ import division @@ -21,8 +20,8 @@ import tensorflow as tf from tensorflow_addons.utils import keras_utils + @keras_utils.register_keras_custom_object - #class ConditionalGradient(tf.keras.optimizer_v2.OptimizerV2): class ConditionalGradient(tf.keras.optimizers.Optimizer): """Optimizer that implements the Conditional Gradient optimization. Helps handle constraints well. 
@@ -34,8 +33,11 @@ class ConditionalGradient(tf.keras.optimizers.Optimizer): ``` """ - def __init__(self, learning_rate, lamda, - use_locking=False, name="ConditionalGradient"): + def __init__(self, + learning_rate, + lamda, + use_locking=False, + name="ConditionalGradient"): """Construct a conditional gradient optimizer. Args: learning_rate: A `Tensor` or a floating point value. @@ -67,7 +69,7 @@ def _prepare(self, var_list): if callable(learning_rate): learning_rate = learning_rate() self._learning_rate_tensor = tf.convert_to_tensor( - learning_rate, name="learning_rate") + learning_rate, name="learning_rate") lamda = self.lamda if callable(lamda): lamda = lamda() @@ -75,13 +77,14 @@ def _prepare(self, var_list): def _resource_apply_dense(self, grad, var): def frobenius_norm(m): - return tf.math.reduce_sum(m ** 2) ** 0.5 + return tf.math.reduce_sum(m**2)**0.5 + norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") norm = tf.dtypes.cast(norm, var.dtype.base_dtype) lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) - var_update_tensor = (tf.math.multiply(var, lr) - - (1 - lr) * lamda * grad / norm) + var_update_tensor = ( + tf.math.multiply(var, lr) - (1 - lr) * lamda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'value': var_update_tensor, @@ -92,14 +95,15 @@ def frobenius_norm(m): def _resource_apply_sparse(self, grad, var, indices): def frobenius_norm(m): - return tf.reduce_sum(m ** 2) ** 0.5 + return tf.reduce_sum(m**2)**0.5 + norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") norm = tf.dtypes.cast(norm, var.dtype.base_dtype) lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) var_slice = tf.gather(var, indices) - var_update_value = (tf.math.multiply(var_slice, lr) - - (1 - lr) * lamda * grad / norm) + var_update_value = ( + tf.math.multiply(var_slice, lr) - (1 - lr) * lamda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'indices': indices, diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 40d141318a..59d9690acb 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -24,37 +24,34 @@ from six.moves import xrange # pylint: disable=redefined-builtin import conditional_gradient as cg_lib + @test_utils.run_all_in_graph_and_eager_modes class ConditionalGradientTest(tf.test.TestCase): - - def _update_conditional_gradient_numpy(self, var, norm, g, lr, lamda): - var = var * lr - (1 - lr) * lamda * g /norm + var = var * lr - (1 - lr) * lamda * g / norm return var def doTestBasic(self, use_resource=False, use_callable_params=False): for i, dtype in enumerate([tf.half, tf.float32, tf.float64]): if use_resource: - var0 = tf.Variable( - [1.0, 2.0], dtype=dtype, name="var0_%d" % i) - var1 = tf.Variable( - [3.0, 4.0], dtype=dtype, name="var1_%d" % i) + var0 = tf.Variable([1.0, 2.0], dtype=dtype, name="var0_%d" % i) + var1 = tf.Variable([3.0, 4.0], dtype=dtype, name="var1_%d" % i) else: var0 = tf.Variable([1.0, 2.0], dtype=dtype) var1 = tf.Variable([3.0, 4.0], dtype=dtype) grads0 = tf.constant([0.1, 0.1], dtype=dtype) grads1 = tf.constant([0.01, 0.01], dtype=dtype) - norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 - norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 + norm0 = tf.math.reduce_sum(grads0**2)**0.5 + norm1 = 
tf.math.reduce_sum(grads1**2)**0.5 learning_rate = lambda: 0.5 lamda = lambda: 0.01 if not use_callable_params: learning_rate = learning_rate() lamda = lamda() cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, lamda=lamda) cg_update = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) + zip([grads0, grads1], [var0, var1])) if not tf.executing_eagerly(): self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -63,8 +60,7 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): self.assertAllClose([3.0, 4.0], self.evaluate(var1)) # Check we have slots - self.assertEqual(["conditional_gradient"], - cg_opt.get_slot_names()) + self.assertEqual(["conditional_gradient"], cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") @@ -81,15 +77,15 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( - np.array([ - 1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, - 2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) + np.array([ + 1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( - np.array([ - 3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, - 4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) + np.array([ + 3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) # Step 2: the conditional_gradient contain the previous update. if tf.executing_eagerly(): @@ -97,23 +93,17 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): else: self.evaluate(cg_update) self.assertAllCloseAccordingToType( - np.array([ - (1.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0, - (2.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) + np.array([(1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0, + (2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 + - (1 - 0.5) * 0.01 * 0.1 / norm0]), + self.evaluate(var0)) self.assertAllCloseAccordingToType( - np.array([ - (3.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1, - (4.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) + np.array([(3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - + (1 - 0.5) * 0.01 * 0.01 / norm1, + (4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 + - (1 - 0.5) * 0.01 * 0.01 / norm1]), + self.evaluate(var1)) #@test_utils.run_in_graph_and_eager_modes(reset_test=True) #? 
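[Editor's note] The nested expressions asserted for step 2 above are easier to read once you see that the second update simply re-applies the same rule to the step-1 result (the gradient, and hence `norm0`/`norm1`, is unchanged). A small self-contained sketch, again with illustrative names and not part of the patch:

```python
import numpy as np

def cg_update(var, grad, lr, lamda):
    # var <- lr * var - (1 - lr) * lamda * grad / ||grad||_F
    norm = np.sqrt(np.sum(grad ** 2))
    return lr * var - (1 - lr) * lamda * grad / norm

var0, grads0 = np.array([1.0, 2.0]), np.array([0.1, 0.1])
step1 = cg_update(var0, grads0, lr=0.5, lamda=0.01)
step2 = cg_update(step1, grads0, lr=0.5, lamda=0.01)
# step2[0] equals (1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5
#                 - (1 - 0.5) * 0.01 * 0.1 / norm0,
# i.e. the expression doTestBasic checks after the second cg_update.
```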
@@ -121,7 +111,6 @@ def testBasic(self): with self.cached_session(): self.doTestBasic(use_resource=False) - @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testResourceBasic(self): self.doTestBasic(use_resource=True) @@ -135,19 +124,17 @@ def testBasicCallableParams(self): def testVariablesAcrossGraphs(self): optimizer = cg_lib.ConditionalGradient(0.01, 0.5) with tf.Graph().as_default(): - var0 = tf.Variable( - [1.0, 2.0], dtype=tf.float32, name="var0") - var1 = tf.Variable( - [3.0, 4.0], dtype=tf.float32, name="var1") + var0 = tf.Variable([1.0, 2.0], dtype=tf.float32, name="var0") + var1 = tf.Variable([3.0, 4.0], dtype=tf.float32, name="var1") loss = lambda: tf.math.reduce_sum(var0 + var1) optimizer.minimize(loss, var_list=[var0, var1]) optimizer_variables = optimizer.variables() - # There should be three items. The first item is iteration, - #and one item for each variable. + # There should be three items. The first item is iteration, + #and one item for each variable. self.assertStartsWith(optimizer_variables[1].name, - "ConditionalGradient/var0") + "ConditionalGradient/var0") self.assertStartsWith(optimizer_variables[2].name, - "ConditionalGradient/var1") + "ConditionalGradient/var1") self.assertEqual(3, len(optimizer_variables)) ''' with tf.Graph().as_default(): @@ -170,16 +157,7 @@ def testMinimizeSparseResourceVariable(self): for dtype in [tf.half, tf.float32, tf.float64]: # This test invokes the ResourceSparseApplyConditionalGradient # operation. - # With graph execution, the placement algorithm notices this - # and automatically places the variable in CPU (host) memory. - # With eager execution, the variable would be placed in GPU - # memory if available, which would then conflict with the - # future invocation of the - # ResourceSparseApplyConditionalGradient operation. - # To work around this discrepancy, for now we force the variable - # to be placed on CPU. - with tf.device("/cpu:0"): - var0 = tf.Variable([[1.0, 2.0]], dtype=dtype) + var0 = tf.Variable([[1.0, 2.0]], dtype=dtype) # pylint: disable=cell-var-from-loop def loss(): @@ -191,14 +169,13 @@ def loss(): grads0_0 = 32 * 1.0 + 40 * 2.0 grads0_1 = 40 * 1.0 + 50 * 2.0 grads0 = tf.constant([[grads0_0, grads0_1]], dtype=dtype) - norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 + norm0 = tf.math.reduce_sum(grads0**2)**0.5 # pylint: enable=cell-var-from-loop learning_rate = 0.1 lamda = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, - lamda=lamda) + learning_rate=learning_rate, lamda=lamda) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -207,53 +184,42 @@ def loss(): # Validate updated params norm0 = self.evaluate(norm0) - self.assertAllCloseAccordingToType([ - [1.0 * learning_rate - - (1 - learning_rate)*lamda*grads0_0/norm0, - 2.0 * learning_rate - - (1 - learning_rate)*lamda*grads0_1/norm0] - ], self.evaluate(var0)) - + self.assertAllCloseAccordingToType([[ + 1.0 * learning_rate - + (1 - learning_rate) * lamda * grads0_0 / norm0, + 2.0 * learning_rate - + (1 - learning_rate) * lamda * grads0_1 / norm0 + ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testMinimizeWith2DIndiciesForEmbeddingLookup(self): # This test invokes the ResourceSparseApplyConditionalGradient - # operation, which - # did not have a registered GPU kernel as of April 2018. With graph - # execution, the placement algorithm notices this and automatically - # places the variable in CPU (host) memory. 
With eager execution, - # the variable would be placed in GPU memory if available, which - # would then conflict with the future invocation of the - # ResourceSparseApplyConditionalGradient operation. - # To work around this discrepancy, for now we force the variable - # to be placed on CPU. - with tf.device("/cpu:0"): - var0 = tf.Variable(tf.ones([2, 2])) + # operation. + var0 = tf.Variable(tf.ones([2, 2])) def loss(): return tf.math.reduce_sum(tf.nn.embedding_lookup(var0, [[1]])) # the gradient for this loss function: grads0 = tf.constant([[0, 0], [1, 1]], dtype=tf.float32) - norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 + norm0 = tf.math.reduce_sum(grads0**2)**0.5 learning_rate = 0.1 lamda = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, - lamda=lamda) + learning_rate=learning_rate, lamda=lamda) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) # Run 1 step of cg_op self.evaluate(cg_op) norm0 = self.evaluate(norm0) - self.assertAllCloseAccordingToType([ - [1, - 1], - [learning_rate * 1 - (1 - learning_rate)*lamda*1/norm0, - learning_rate * 1 - (1 - learning_rate)*lamda*1/norm0] - ], self.evaluate(var0)) + self.assertAllCloseAccordingToType( + [[1, 1], + [ + learning_rate * 1 - (1 - learning_rate) * lamda * 1 / norm0, + learning_rate * 1 - (1 - learning_rate) * lamda * 1 / norm0 + ]], self.evaluate(var0)) @test_utils.run_deprecated_v1 def testTensorLearningRateAndConditionalGradient(self): @@ -263,29 +229,27 @@ def testTensorLearningRateAndConditionalGradient(self): var1 = tf.Variable([3.0, 4.0], dtype=dtype) grads0 = tf.constant([0.1, 0.1], dtype=dtype) grads1 = tf.constant([0.01, 0.01], dtype=dtype) - norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 - norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 + norm0 = tf.math.reduce_sum(grads0**2)**0.5 + norm1 = tf.math.reduce_sum(grads1**2)**0.5 cg_opt = cg_lib.ConditionalGradient( - learning_rate=tf.constant(0.5), - lamda=tf.constant(0.01)) + learning_rate=tf.constant(0.5), lamda=tf.constant(0.01)) cg_update = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) + zip([grads0, grads1], [var0, var1])) self.evaluate(tf.compat.v1.global_variables_initializer()) # Check we have slots self.assertEqual(["conditional_gradient"], - cg_opt.get_slot_names()) + cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) if not tf.executing_eagerly(): - self.assertFalse(slot0 in - tf.compat.v1.trainable_variables()) - self.assertFalse(slot1 in - tf.compat.v1.trainable_variables()) - + self.assertFalse( + slot0 in tf.compat.v1.trainable_variables()) + self.assertFalse( + slot1 in tf.compat.v1.trainable_variables()) # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], self.evaluate(var0)) @@ -296,37 +260,32 @@ def testTensorLearningRateAndConditionalGradient(self): norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( - np.array([ - 1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, - 2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) + np.array([ + 1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0, + 2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0 + ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( - np.array([ - 3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, - 4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 - ]), 
self.evaluate(var1)) + np.array([ + 3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1, + 4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1 + ]), self.evaluate(var1)) # Step 2: the conditional_gradient contain the # previous update. cg_update.run() # Check that the parameters have been updated. self.assertAllCloseAccordingToType( - np.array([ - (1.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0, - (2.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - - (1 - 0.5) * 0.01 * 0.1 / norm0 - ]), self.evaluate(var0)) + np.array( + [(1.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0, + (2.0 * 0.5 - (1 - 0.5) * 0.01 * 0.1 / norm0) * 0.5 - + (1 - 0.5) * 0.01 * 0.1 / norm0]), self.evaluate(var0)) self.assertAllCloseAccordingToType( - np.array([ - (3.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1, - (4.0 * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - - (1 - 0.5) * 0.01 * 0.01 / norm1 - ]), self.evaluate(var1)) + np.array( + [(3.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 - + (1 - 0.5) * 0.01 * 0.01 / norm1, + (4.0 * 0.5 - (1 - 0.5) * 0.01 * 0.01 / norm1) * 0.5 + - (1 - 0.5) * 0.01 * 0.01 / norm1]), + self.evaluate(var1)) def _dbParamsCG01(self): """Return dist-belief conditional_gradient values. @@ -438,15 +397,12 @@ def testLikeDistBeliefCG01(self): num_samples = len(db_grad) var0 = tf.Variable([0.0] * num_samples) grads0 = tf.constant([0.0] * num_samples) - cg_opt = cg_lib.ConditionalGradient( - learning_rate=0.1, lamda=0.1) + cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, lamda=0.1) cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) self.evaluate(tf.compat.v1.global_variables_initializer()) for i in xrange(num_samples): cg_update.run(feed_dict={grads0: db_grad[i]}) - self.assertAllClose( - np.array(db_out[i]), self.evaluate(var0)) - + self.assertAllClose(np.array(db_out[i]), self.evaluate(var0)) @test_utils.run_deprecated_v1 def testSparse(self): @@ -455,40 +411,36 @@ def testSparse(self): var0 = tf.Variable(tf.zeros([4, 2], dtype=dtype)) var1 = tf.Variable(tf.constant(1.0, dtype, [4, 2])) grads0 = tf.IndexedSlices( - tf.constant( - [[.1, .1]], dtype=dtype), - tf.constant([1]), - tf.constant([4, 2])) + tf.constant([[.1, .1]], dtype=dtype), tf.constant([1]), + tf.constant([4, 2])) grads1 = tf.IndexedSlices( - tf.constant( - [[.01, .01], [.01, .01]], dtype=dtype), - tf.constant([2, 3]), - tf.constant([4, 2])) - norm0 = tf.math.reduce_sum( - tf.math.multiply(grads0, grads0)) ** 0.5 - norm1 = tf.math.reduce_sum( - tf.math.multiply(grads1, grads1)) ** 0.5 + tf.constant([[.01, .01], [.01, .01]], dtype=dtype), + tf.constant([2, 3]), tf.constant([4, 2])) + norm0 = tf.math.reduce_sum(tf.math.multiply(grads0, + grads0))**0.5 + norm1 = tf.math.reduce_sum(tf.math.multiply(grads1, + grads1))**0.5 learning_rate = 0.1 lamda = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, lamda=lamda) cg_update = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) + zip([grads0, grads1], [var0, var1])) self.evaluate(tf.compat.v1.global_variables_initializer()) # Check we have slots self.assertEqual(["conditional_gradient"], - cg_opt.get_slot_names()) + cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) if not tf.executing_eagerly(): - 
self.assertFalse(slot0 in - tf.compat.v1.trainable_variables()) - self.assertFalse(slot1 in - tf.compat.v1.trainable_variables()) + self.assertFalse( + slot0 in tf.compat.v1.trainable_variables()) + self.assertFalse( + slot1 in tf.compat.v1.trainable_variables()) # Fetch params to validate initial values self.assertAllClose([0, 0], self.evaluate(var0)[0]) @@ -501,59 +453,48 @@ def testSparse(self): norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( - np.array([0 - (1 - learning_rate) - * lamda * 0 / norm0, - 0 - (1 - learning_rate) * lamda * 0 - / norm0]), - self.evaluate(var0)[0]) + np.array([ + 0 - (1 - learning_rate) * lamda * 0 / norm0, + 0 - (1 - learning_rate) * lamda * 0 / norm0 + ]), + self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( - np.array([0 - (1 - learning_rate) - * lamda * 0.1 / norm0, - 0 - (1 - learning_rate) * lamda * 0.1 - / norm0]), - self.evaluate(var0)[1]) + np.array([ + 0 - (1 - learning_rate) * lamda * 0.1 / norm0, + 0 - (1 - learning_rate) * lamda * 0.1 / norm0 + ]), + self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( - np.array([1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 - / norm1, - 1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 - / norm1]), - self.evaluate(var1)[2]) + np.array([ + 1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1, + 1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1 + ]), + self.evaluate(var1)[2]) # Step 2: the conditional_gradient contain the # previous update. cg_update.run() # Check that the parameters have been updated. - self.assertAllClose(np.array([0, 0]), - self.evaluate(var0)[0]) + self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( - np.array([ - (0 - (1 - learning_rate) - * lamda * 0.1 / norm0) - * learning_rate - - (1 - learning_rate) * lamda * 0.1 - / norm0, - (0 - (1 - learning_rate) - * lamda * 0.1 / norm0) - * learning_rate - - (1 - learning_rate) - * lamda * 0.1 / norm0]), - self.evaluate(var0)[1]) + np.array([(0 - (1 - learning_rate) * lamda * 0.1 / norm0) * + learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0, + (0 - (1 - learning_rate) * lamda * 0.1 / norm0) * + learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0]), + self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( - np.array([ - (1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 - / norm1) * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1, - (1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 - / norm1) - * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1]), - self.evaluate(var1)[2]) - + np.array([(1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1) * + learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1, + (1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1) * + learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1]), + self.evaluate(var1)[2]) @test_utils.run_deprecated_v1 def testSharing(self): @@ -563,31 +504,31 @@ def testSharing(self): var1 = tf.Variable([3.0, 4.0], dtype=dtype) grads0 = tf.constant([0.1, 0.1], dtype=dtype) grads1 = tf.constant([0.01, 0.01], dtype=dtype) - norm0 = tf.math.reduce_sum(grads0 ** 2) ** 0.5 - norm1 = tf.math.reduce_sum(grads1 ** 2) ** 0.5 + norm0 = tf.math.reduce_sum(grads0**2)**0.5 + norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = 0.1 lamda = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + 
learning_rate=learning_rate, lamda=lamda) cg_update1 = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) + zip([grads0, grads1], [var0, var1])) cg_update2 = cg_opt.apply_gradients( - zip([grads0, grads1], [var0, var1])) + zip([grads0, grads1], [var0, var1])) self.evaluate(tf.compat.v1.global_variables_initializer()) # Check we have slots self.assertEqual(["conditional_gradient"], - cg_opt.get_slot_names()) + cg_opt.get_slot_names()) slot0 = cg_opt.get_slot(var0, "conditional_gradient") self.assertEquals(slot0.get_shape(), var0.get_shape()) slot1 = cg_opt.get_slot(var1, "conditional_gradient") self.assertEquals(slot1.get_shape(), var1.get_shape()) if not tf.executing_eagerly(): - self.assertFalse(slot0 in - tf.compat.v1.trainable_variables()) - self.assertFalse(slot1 in - tf.compat.v1.trainable_variables()) + self.assertFalse( + slot0 in tf.compat.v1.trainable_variables()) + self.assertFalse( + slot1 in tf.compat.v1.trainable_variables()) # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], self.evaluate(var0)) @@ -598,23 +539,19 @@ def testSharing(self): norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( - np.array( - [1.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.1 / norm0, - 2.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.1 / norm0]), - self.evaluate(var0)) + np.array([ + 1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0, + 2.0 * learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0 + ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( - np.array( - [3.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1, - 4.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1]), - self.evaluate(var1)) + np.array([ + 3.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1, + 4.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1 + ]), self.evaluate(var1)) # Step 2: the second conditional_gradient contain # the previous update. @@ -622,35 +559,26 @@ def testSharing(self): # Check that the parameters have been updated. 
self.assertAllCloseAccordingToType( - np.array([ - (1.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.1 / norm0) - * learning_rate - - (1 - learning_rate) * lamda - * 0.1 / norm0, - (2.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.1 / norm0) - * learning_rate - - (1 - learning_rate) * lamda - * 0.1 / norm0 - ]), self.evaluate(var0)) + np.array([(1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0) * + learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0, + (2.0 * learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0) * + learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0]), + self.evaluate(var0)) self.assertAllCloseAccordingToType( - np.array([ - (3.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1) - * learning_rate - - (1 - learning_rate) * lamda - * 0.01 / norm1, - (4.0 * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1) - * learning_rate - - (1 - learning_rate) - * lamda * 0.01 / norm1 - ]), self.evaluate(var1)) + np.array([(3.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1) * + learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1, + (4.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1) * + learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1]), + self.evaluate(var1)) + if __name__ == "__main__": tf.test.main() From aeb991cc3b18c28283ea1834f70ef5bca7913ccd Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Tue, 3 Sep 2019 09:40:42 -0500 Subject: [PATCH 11/20] Add files via upload --- tensorflow_addons/optimizers/BUILD | 2 +- .../optimizers/conditional_gradient_test.py | 26 ++++++++++++++++--- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/tensorflow_addons/optimizers/BUILD b/tensorflow_addons/optimizers/BUILD index ebd61571df..83c88cc363 100644 --- a/tensorflow_addons/optimizers/BUILD +++ b/tensorflow_addons/optimizers/BUILD @@ -9,7 +9,7 @@ py_library( "conditional_gradient.py", "lazy_adam.py", "moving_average.py", - "weight_decay_optimizers.py" + "weight_decay_optimizers.py", ], srcs_version = "PY2AND3", deps = [ diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 59d9690acb..d7304ee5ef 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -152,11 +152,31 @@ def testVariablesAcrossGraphs(self): self.assertEqual(3, len(optimizer_variables)) ''' + # Based on issue #347 in the following link, + # "https://github.com/tensorflow/addons/issues/347" + # tf.half is not registered for 'ResourceScatterUpdate' OpKernel for 'GPU' devices. + # So we have to remove tf.half when testing with gpu. + # The function "_DtypesToTest" is from + # "https://github.com/tensorflow/tensorflow/blob/5d4a6cee737a1dc6c20172a1dc1 + # 5df10def2df72/tensorflow/python/kernel_tests/conv_ops_3d_test.py#L53-L62" + + def _DtypesToTest(self, use_gpu): + if use_gpu: + if not test_utils.GpuSupportsHalfMatMulAndConv(): + return [tf.float32, tf.float64] + else: + return [tf.half, tf.float32, tf.float64] + else: + return [tf.half, tf.float32, tf.float64] + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testMinimizeSparseResourceVariable(self): - for dtype in [tf.half, tf.float32, tf.float64]: - # This test invokes the ResourceSparseApplyConditionalGradient - # operation. + # This test invokes the ResourceSparseApplyConditionalGradient + # operation. 
And it will call the 'ResourceScatterUpdate' OpKernel + # for 'GPU' devices. However, tf.half is not registered in this case, + # based on issue #347. + # Thus, we will call the "_DtypesToTest" function. + for dtype in self._DtypesToTest(use_gpu=tf.test.is_gpu_available()): var0 = tf.Variable([[1.0, 2.0]], dtype=dtype) # pylint: disable=cell-var-from-loop From 09ec6310aaec1d067f9e864ead155b3d8bfed66d Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Tue, 3 Sep 2019 10:30:06 -0500 Subject: [PATCH 12/20] Add files via upload --- tensorflow_addons/optimizers/conditional_gradient_test.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index d7304ee5ef..fee16985aa 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -162,10 +162,7 @@ def testVariablesAcrossGraphs(self): def _DtypesToTest(self, use_gpu): if use_gpu: - if not test_utils.GpuSupportsHalfMatMulAndConv(): - return [tf.float32, tf.float64] - else: - return [tf.half, tf.float32, tf.float64] + return [tf.float32, tf.float64] else: return [tf.half, tf.float32, tf.float64] From d9ef23e2fb8c05b72ec38250d7b9be5fe042c2ce Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Tue, 3 Sep 2019 11:41:34 -0500 Subject: [PATCH 13/20] Add files via upload --- tensorflow_addons/optimizers/conditional_gradient_test.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index fee16985aa..0cafd6f72b 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -173,6 +173,10 @@ def testMinimizeSparseResourceVariable(self): # for 'GPU' devices. However, tf.half is not registered in this case, # based on issue #347. # Thus, we will call the "_DtypesToTest" function. + # + # TODO: + # Wait for the solving of issue #347. After that, we will test + # for the dtype to be tf.half, with 'GPU' devices. for dtype in self._DtypesToTest(use_gpu=tf.test.is_gpu_available()): var0 = tf.Variable([[1.0, 2.0]], dtype=dtype) From 0c51eba460fe44dfa1334a64de4f839dea074fd5 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Sun, 8 Sep 2019 17:05:03 -0500 Subject: [PATCH 14/20] Add files via upload --- .../optimizers/conditional_gradient.py | 15 ++- .../optimizers/conditional_gradient_test.py | 127 ++++++++++-------- 2 files changed, 82 insertions(+), 60 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 34c0bf4e3c..43600b6c07 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -37,7 +37,8 @@ def __init__(self, learning_rate, lamda, use_locking=False, - name="ConditionalGradient"): + name="ConditionalGradient", + **kwargs): """Construct a conditional gradient optimizer. Args: learning_rate: A `Tensor` or a floating point value. @@ -47,9 +48,10 @@ def __init__(self, name: Optional name prefix for the operations created when applying gradients. 
Defaults to "ConditionalGradient" """ - super(ConditionalGradient, self).__init__(name=name) + super(ConditionalGradient, self).__init__(name=name, **kwargs) self._set_hyper("learning_rate", learning_rate) self._set_hyper("lamda", lamda) + self._set_hyper("use_locking", use_locking) def get_config(self): config = { @@ -74,13 +76,14 @@ def _prepare(self, var_list): if callable(lamda): lamda = lamda() self._lamda_tensor = tf.convert_to_tensor(lamda, name="lamda") + return super(ConditionalGradient, self)._prepare(var_list) def _resource_apply_dense(self, grad, var): def frobenius_norm(m): return tf.math.reduce_sum(m**2)**0.5 - norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") - norm = tf.dtypes.cast(norm, var.dtype.base_dtype) + norm = tf.convert_to_tensor( + frobenius_norm(grad), name="norm", dtype=var.dtype.base_dtype) lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) var_update_tensor = ( @@ -97,8 +100,8 @@ def _resource_apply_sparse(self, grad, var, indices): def frobenius_norm(m): return tf.reduce_sum(m**2)**0.5 - norm = tf.convert_to_tensor(frobenius_norm(grad), name="norm") - norm = tf.dtypes.cast(norm, var.dtype.base_dtype) + norm = tf.convert_to_tensor( + frobenius_norm(grad), name="norm", dtype=var.dtype.base_dtype) lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) var_slice = tf.gather(var, indices) diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 0cafd6f72b..d2c96ad161 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -105,8 +105,7 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): - (1 - 0.5) * 0.01 * 0.01 / norm1]), self.evaluate(var1)) - #@test_utils.run_in_graph_and_eager_modes(reset_test=True) - #? + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testBasic(self): with self.cached_session(): self.doTestBasic(use_resource=False) @@ -115,12 +114,11 @@ def testBasic(self): def testResourceBasic(self): self.doTestBasic(use_resource=True) + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testBasicCallableParams(self): - #with tf.enable_eager_execution(): self.doTestBasic(use_resource=True, use_callable_params=True) - #@test_utils.run_in_graph_and_eager_modes(reset_test=True) - #? 
+ @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testVariablesAcrossGraphs(self): optimizer = cg_lib.ConditionalGradient(0.01, 0.5) with tf.Graph().as_default(): @@ -242,7 +240,7 @@ def loss(): learning_rate * 1 - (1 - learning_rate) * lamda * 1 / norm0 ]], self.evaluate(var0)) - @test_utils.run_deprecated_v1 + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testTensorLearningRateAndConditionalGradient(self): for dtype in [tf.half, tf.float32, tf.float64]: with self.cached_session(): @@ -256,7 +254,11 @@ def testTensorLearningRateAndConditionalGradient(self): learning_rate=tf.constant(0.5), lamda=tf.constant(0.01)) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) - self.evaluate(tf.compat.v1.global_variables_initializer()) + if not tf.executing_eagerly(): + self.evaluate(tf.compat.v1.global_variables_initializer()) + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) # Check we have slots self.assertEqual(["conditional_gradient"], @@ -272,11 +274,8 @@ def testTensorLearningRateAndConditionalGradient(self): self.assertFalse( slot1 in tf.compat.v1.trainable_variables()) - # Fetch params to validate initial values - self.assertAllClose([1.0, 2.0], self.evaluate(var0)) - self.assertAllClose([3.0, 4.0], self.evaluate(var1)) - - cg_update.run() + if not tf.executing_eagerly(): + self.evaluate(cg_update) # Check that the parameters have been updated. norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) @@ -292,7 +291,10 @@ def testTensorLearningRateAndConditionalGradient(self): ]), self.evaluate(var1)) # Step 2: the conditional_gradient contain the # previous update. - cg_update.run() + if tf.executing_eagerly(): + cg_opt.apply_gradients(zip([grads0, grads1], [var0, var1])) + else: + self.evaluate(cg_update) # Check that the parameters have been updated. self.assertAllCloseAccordingToType( np.array( @@ -411,7 +413,7 @@ def _dbParamsCG01(self): # pylint: enable=line-too-long return db_grad, db_out - @test_utils.run_deprecated_v1 + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testLikeDistBeliefCG01(self): with self.cached_session(): db_grad, db_out = self._dbParamsCG01() @@ -419,15 +421,23 @@ def testLikeDistBeliefCG01(self): var0 = tf.Variable([0.0] * num_samples) grads0 = tf.constant([0.0] * num_samples) cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, lamda=0.1) - cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) - self.evaluate(tf.compat.v1.global_variables_initializer()) + if not tf.executing_eagerly(): + cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) + self.evaluate(tf.compat.v1.global_variables_initializer()) + for i in xrange(num_samples): - cg_update.run(feed_dict={grads0: db_grad[i]}) + if tf.executing_eagerly(): + grads0 = tf.constant(db_grad[i]) + cg_opt.apply_gradients(zip([grads0], [var0])) + else: + cg_update.run(feed_dict={grads0: db_grad[i]}) self.assertAllClose(np.array(db_out[i]), self.evaluate(var0)) - @test_utils.run_deprecated_v1 + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testSparse(self): - for dtype in [tf.half, tf.float32, tf.float64]: + # TODO: + # To address the issue #347. 
+ for dtype in self._DtypesToTest(use_gpu=tf.test.is_gpu_available()): with self.cached_session(): var0 = tf.Variable(tf.zeros([4, 2], dtype=dtype)) var1 = tf.Variable(tf.constant(1.0, dtype, [4, 2])) @@ -447,7 +457,13 @@ def testSparse(self): learning_rate=learning_rate, lamda=lamda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) - self.evaluate(tf.compat.v1.global_variables_initializer()) + + if not tf.executing_eagerly(): + self.evaluate(tf.compat.v1.global_variables_initializer()) + # Fetch params to validate initial values + self.assertAllClose([0, 0], self.evaluate(var0)[0]) + self.assertAllClose([0, 0], self.evaluate(var0)[1]) + self.assertAllClose([1, 1], self.evaluate(var1)[2]) # Check we have slots self.assertEqual(["conditional_gradient"], @@ -463,13 +479,9 @@ def testSparse(self): self.assertFalse( slot1 in tf.compat.v1.trainable_variables()) - # Fetch params to validate initial values - self.assertAllClose([0, 0], self.evaluate(var0)[0]) - self.assertAllClose([0, 0], self.evaluate(var0)[1]) - self.assertAllClose([1, 1], self.evaluate(var1)[2]) - # Step 1: - cg_update.run() + if not tf.executing_eagerly(): + self.evaluate(cg_update) # Check that the parameters have been updated. norm0 = self.evaluate(norm0) norm1 = self.evaluate(norm1) @@ -495,7 +507,10 @@ def testSparse(self): self.evaluate(var1)[2]) # Step 2: the conditional_gradient contain the # previous update. - cg_update.run() + if tf.executing_eagerly(): + cg_opt.apply_gradients(zip([grads0, grads1], [var0, var1])) + else: + self.evaluate(cg_update) # Check that the parameters have been updated. self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( @@ -517,7 +532,7 @@ def testSparse(self): (1 - learning_rate) * lamda * 0.01 / norm1]), self.evaluate(var1)[2]) - @test_utils.run_deprecated_v1 + @test_utils.run_in_graph_and_eager_modes(reset_test=True) def testSharing(self): for dtype in [tf.half, tf.float32, tf.float64]: with self.cached_session(): @@ -535,7 +550,11 @@ def testSharing(self): zip([grads0, grads1], [var0, var1])) cg_update2 = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) - self.evaluate(tf.compat.v1.global_variables_initializer()) + if not tf.executing_eagerly(): + self.evaluate(tf.compat.v1.global_variables_initializer()) + # Fetch params to validate initial values + self.assertAllClose([1.0, 2.0], self.evaluate(var0)) + self.assertAllClose([3.0, 4.0], self.evaluate(var1)) # Check we have slots self.assertEqual(["conditional_gradient"], @@ -550,34 +569,34 @@ def testSharing(self): slot0 in tf.compat.v1.trainable_variables()) self.assertFalse( slot1 in tf.compat.v1.trainable_variables()) - - # Fetch params to validate initial values - self.assertAllClose([1.0, 2.0], self.evaluate(var0)) - self.assertAllClose([3.0, 4.0], self.evaluate(var1)) - - cg_update1.run() - # Check that the parameters have been updated. - norm0 = self.evaluate(norm0) - norm1 = self.evaluate(norm1) - self.assertAllCloseAccordingToType( - np.array([ - 1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0, - 2.0 * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0 - ]), self.evaluate(var0)) - self.assertAllCloseAccordingToType( - np.array([ - 3.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1, - 4.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1 - ]), self.evaluate(var1)) + # Because in the eager mode, as we declare two cg_update variables, + # it already altomatically finish executing them. 
Thus, we cannot + # test the param value at this time for eager mode. We can only test + # the final value of param after the second execution. + if not tf.executing_eagerly(): + self.evaluate(cg_update1) + # Check that the parameters have been updated. + norm0 = self.evaluate(norm0) + norm1 = self.evaluate(norm1) + self.assertAllCloseAccordingToType( + np.array([ + 1.0 * learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0, + 2.0 * learning_rate - + (1 - learning_rate) * lamda * 0.1 / norm0 + ]), self.evaluate(var0)) + self.assertAllCloseAccordingToType( + np.array([ + 3.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1, + 4.0 * learning_rate - + (1 - learning_rate) * lamda * 0.01 / norm1 + ]), self.evaluate(var1)) # Step 2: the second conditional_gradient contain # the previous update. - cg_update2.run() - + if not tf.executing_eagerly(): + self.evaluate(cg_update2) # Check that the parameters have been updated. self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - From 953fa39bd19aabf04615c82c86b4032cb28a3826 Mon Sep 17 00:00:00 2001 From: pkan2 Date: Sun, 8 Sep 2019 17:43:50 -0500 Subject: [PATCH 15/20] add CG optimizer --- CONTRIBUTING.md | 10 +- README.md | 9 +- .../crosstool_wrapper_driver_is_not_gcc.tpl | 0 build_deps/gpu/cuda_configure.bzl | 8 +- build_deps/requirements.txt | 3 +- build_deps/requirements_gpu.txt | 3 +- build_deps/tf_dependency/tf_configure.bzl | 2 +- .../gcc7_manylinux2010-nvcc-cuda10.0/BUILD | 115 ++ .../cc_toolchain_config.bzl | 1493 +++++++++++++++++ .../bin/crosstool_wrapper_driver_is_not_gcc | 268 +++ build_pip_pkg.sh | 1 + configure.sh | 22 +- examples/image_ops.ipynb | 601 +++++++ examples/layers_normalizations.ipynb | 4 +- examples/layers_weightnormalization.ipynb | 4 +- examples/losses_triplet.ipynb | 6 +- examples/optimizers_lazyadam.ipynb | 4 +- examples/template.ipynb | 6 +- makefile | 1 - setup.py | 39 +- tensorflow_addons/__init__.py | 58 +- tensorflow_addons/activations/BUILD | 19 +- tensorflow_addons/activations/README.md | 14 +- tensorflow_addons/activations/__init__.py | 1 + tensorflow_addons/activations/gelu.py | 55 + tensorflow_addons/activations/gelu_test.py | 106 ++ tensorflow_addons/activations/sparsemax.py | 2 +- .../activations/sparsemax_test.py | 14 + .../custom_ops/activations/BUILD | 47 + .../activations/cc/kernels/gelu_op.cc | 77 + .../activations/cc/kernels/gelu_op.h | 144 ++ .../activations/cc/kernels/gelu_op_gpu.cu.cc | 36 + .../custom_ops/activations/cc/ops/gelu_op.cc | 37 + tensorflow_addons/custom_ops/image/BUILD | 29 +- .../image/cc/kernels/connected_components.cc | 138 ++ .../image/cc/kernels/connected_components.h | 305 ++++ .../euclidean_distance_transform_op.cc | 6 +- .../kernels/euclidean_distance_transform_op.h | 14 +- .../euclidean_distance_transform_op_gpu.cu.cc | 40 + .../custom_ops/image/cc/ops/image_ops.cc | 31 +- tensorflow_addons/custom_ops/layers/BUILD | 3 +- .../layers/cc/kernels/correlation_cost_op.cc | 2 +- tensorflow_addons/image/BUILD | 15 +- tensorflow_addons/image/README.md | 3 + tensorflow_addons/image/__init__.py | 1 + .../image/connected_components.py | 96 ++ .../image/connected_components_test.py | 157 ++ .../image/interpolate_spline_test.py | 234 ++- .../image/sparse_image_warp_test.py | 150 +- tensorflow_addons/layers/BUILD | 14 + tensorflow_addons/layers/README.md | 2 + tensorflow_addons/layers/__init__.py | 3 +- tensorflow_addons/layers/gelu.py | 57 + tensorflow_addons/layers/gelu_test.py | 39 + tensorflow_addons/layers/optical_flow_test.py | 2 +- 
tensorflow_addons/layers/sparsemax_test.py | 6 +- tensorflow_addons/layers/wrappers.py | 123 +- tensorflow_addons/layers/wrappers_test.py | 114 +- tensorflow_addons/losses/README.md | 1 + tensorflow_addons/losses/__init__.py | 2 +- tensorflow_addons/losses/npairs.py | 110 ++ tensorflow_addons/losses/npairs_test.py | 79 + tensorflow_addons/losses/sparsemax_loss.py | 2 +- .../losses/sparsemax_loss_test.py | 6 + tensorflow_addons/metrics/cohens_kappa.py | 18 +- tensorflow_addons/metrics/f1_test.py | 19 + tensorflow_addons/metrics/f_scores.py | 6 +- tensorflow_addons/metrics/fbeta_test.py | 19 + tensorflow_addons/optimizers/BUILD | 0 tensorflow_addons/optimizers/README.md | 0 tensorflow_addons/optimizers/__init__.py | 0 .../optimizers/lazy_adam_test.py | 2 +- tensorflow_addons/rnn/cell_test.py | 4 +- .../seq2seq/attention_wrapper.py | 6 +- .../seq2seq/attention_wrapper_test.py | 50 +- tensorflow_addons/text/crf.py | 5 +- tensorflow_addons/text/crf_test.py | 6 + tensorflow_addons/version.py | 2 +- tools/ci_build/builds/release_linux.sh | 12 +- tools/ci_build/builds/release_macos.sh | 12 +- .../builds/tf_auditwheel_patch.sh} | 15 +- tools/ci_build/builds/wheel_verify.sh | 14 +- tools/ci_build/ci_sanity.sh | 8 +- .../verify/check_file_name.py} | 4 +- .../verify/check_futures.py} | 2 +- tools/ci_testing/addons_gpu.sh | 1 + tools/ci_testing/install_py36.sh | 31 - tools/run_docker.sh | 8 +- 88 files changed, 4673 insertions(+), 564 deletions(-) mode change 100755 => 100644 build_deps/gpu/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl create mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD create mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl create mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc create mode 100644 examples/image_ops.ipynb create mode 100644 tensorflow_addons/activations/gelu.py create mode 100644 tensorflow_addons/activations/gelu_test.py create mode 100644 tensorflow_addons/custom_ops/activations/BUILD create mode 100644 tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc create mode 100644 tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h create mode 100644 tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc create mode 100644 tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc create mode 100644 tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc create mode 100644 tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h create mode 100644 tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc create mode 100644 tensorflow_addons/image/connected_components.py create mode 100644 tensorflow_addons/image/connected_components_test.py create mode 100644 tensorflow_addons/layers/gelu.py create mode 100644 tensorflow_addons/layers/gelu_test.py mode change 100644 => 100755 tensorflow_addons/optimizers/BUILD mode change 100644 => 100755 tensorflow_addons/optimizers/README.md mode change 100644 => 100755 tensorflow_addons/optimizers/__init__.py mode change 100644 => 100755 tools/ci_build/builds/release_macos.sh rename tools/{ci_testing/run_tests.sh => ci_build/builds/tf_auditwheel_patch.sh} (71%) rename tools/{test/file_name_test.py => ci_build/verify/check_file_name.py} (95%) rename tools/{test/check_futures_test.py => ci_build/verify/check_futures.py} (99%) delete mode 100755 tools/ci_testing/install_py36.sh diff --git 
a/CONTRIBUTING.md b/CONTRIBUTING.md index 7bbfab997a..3aae3caf1f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -52,8 +52,8 @@ Try these useful commands below: * Format code automatically: `bash tools/run_docker.sh -c 'make code-format'` * Run sanity check: `bash tools/run_docker.sh -c 'make sanity-check'` * Run CPU unit tests: `bash tools/run_docker.sh -c 'make unit-test'` -* Run GPU unit tests: `bash tools/run_docker.sh -c 'make gpu-unit-test'` -* All of the above: `bash tools/run_docker.sh -c 'make'` +* Run GPU unit tests: `bash tools/run_docker.sh -d gpu -c 'make gpu-unit-test'` +* All of the above: `bash tools/run_docker.sh -d gpu -c 'make'` ## Coding style @@ -76,8 +76,7 @@ bash tools/run_docker.sh -c 'make unit-test' or run manually: ```bash -docker run --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:nightly-custom-op /bin/bash - +docker run --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:custom-op-ubuntu16 /bin/bash ./configure.sh # Links project with TensorFlow dependency bazel test -c opt -k \ @@ -94,11 +93,12 @@ bash tools/run_docker.sh -d gpu -c 'make gpu-unit-test' or run manually: ```bash -docker run --runtime=nvidia --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:custom-op-gpu /bin/bash +docker run --runtime=nvidia --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:custom-op-gpu-ubuntu16 /bin/bash ./configure.sh # Links project with TensorFlow dependency bazel test -c opt -k \ --test_timeout 300,450,1200,3600 \ +--crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0:toolchain \ --test_output=all \ --jobs=1 \ //tensorflow_addons/... diff --git a/README.md b/README.md index 1b53efd48d..ecabd4ae65 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,6 @@ To install the latest version, run the following: pip install tensorflow-addons ``` -**Note:** You will also need [`tensorflow==2.0.0-beta1`](https://www.tensorflow.org/beta) installed. To use addons: @@ -73,6 +72,9 @@ https://bazel.build/) build system. git clone https://github.com/tensorflow/addons.git cd addons +# If building GPU Ops (Requires CUDA 10.0 and CuDNN 7) +export TF_NEED_CUDA=1 + # This script links project with TensorFlow dependency ./configure.sh @@ -93,6 +95,11 @@ User experience and project maintainability are core concepts in TF-Addons. In order to achieve these we require that our additions conform to established API patterns seen in core TensorFlow. +#### GPU/CPU Custom-Ops +A major benefit of TensorFlow Addons is that there are precompiled ops. Should +a CUDA 10 installation not be found then the op will automatically fall back to +a CPU implementation. + #### Proxy Maintainership Addons has been designed to compartmentalize subpackages and submodules so that they can be maintained by users who have expertise and a vested interest diff --git a/build_deps/gpu/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl b/build_deps/gpu/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl old mode 100755 new mode 100644 diff --git a/build_deps/gpu/cuda_configure.bzl b/build_deps/gpu/cuda_configure.bzl index 475a1281da..f996eeb08a 100644 --- a/build_deps/gpu/cuda_configure.bzl +++ b/build_deps/gpu/cuda_configure.bzl @@ -174,7 +174,7 @@ def _get_win_cuda_defines(repository_ctx): # If we are not on Windows, return empty vaules for Windows specific fields. # This ensures the CROSSTOOL file parser is happy. 
if not _is_windows(repository_ctx): - return { + return dict({ "%{msvc_env_tmp}": "", "%{msvc_env_path}": "", "%{msvc_env_include}": "", @@ -184,7 +184,7 @@ def _get_win_cuda_defines(repository_ctx): "%{msvc_link_path}": "", "%{msvc_lib_path}": "", "%{cxx_builtin_include_directory}": "", - } + }) vc_path = find_vc_path(repository_ctx) if not vc_path: @@ -957,6 +957,8 @@ def _get_cuda_config(repository_ctx): ) def _tpl(repository_ctx, tpl, substitutions = {}, out = None): + if substitutions == None: + substitutions = {} if not out: out = tpl.replace(":", "/") repository_ctx.template( @@ -1301,7 +1303,7 @@ def _create_local_cuda_repository(repository_ctx): _tpl( repository_ctx, "crosstool:CROSSTOOL", - cuda_defines + _get_win_cuda_defines(repository_ctx), + cuda_defines.update(_get_win_cuda_defines(repository_ctx)), out = "crosstool/CROSSTOOL", ) diff --git a/build_deps/requirements.txt b/build_deps/requirements.txt index 332a109199..fda98be524 100644 --- a/build_deps/requirements.txt +++ b/build_deps/requirements.txt @@ -1 +1,2 @@ -tf-nightly-2.0-preview==2.0.0.dev20190731 +# TensorFlow greater than this date is manylinux2010 compliant +tf-nightly-2.0-preview>=2.0.0.dev20190802 diff --git a/build_deps/requirements_gpu.txt b/build_deps/requirements_gpu.txt index 24d74c5f53..e0f02a4f63 100644 --- a/build_deps/requirements_gpu.txt +++ b/build_deps/requirements_gpu.txt @@ -1 +1,2 @@ -tf-nightly-gpu-2.0-preview==2.0.0.dev20190731 +# TensorFlow greater than this date is manylinux2010 compliant +tf-nightly-gpu-2.0-preview>=2.0.0.dev20190802 diff --git a/build_deps/tf_dependency/tf_configure.bzl b/build_deps/tf_dependency/tf_configure.bzl index fc187ee552..485773b938 100644 --- a/build_deps/tf_dependency/tf_configure.bzl +++ b/build_deps/tf_dependency/tf_configure.bzl @@ -168,7 +168,7 @@ def _symlink_genrule_for_dir( # Copy the headers to create a sandboxable setup. cmd = "cp -f" - command.append(cmd + ' "%s" "%s"' % (src_files[i], dest)) + command.append(cmd + ' "%s" "%s" | true' % (src_files[i], dest)) outs.append(' "' + dest_dir + dest_files[i] + '",') genrule = _genrule( genrule_name, diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD new file mode 100755 index 0000000000..234bd0ed92 --- /dev/null +++ b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD @@ -0,0 +1,115 @@ +# This file is expanded from a template by cuda_configure.bzl +# Update cuda_configure.bzl#verify_build_defines when adding new variables. 
+ +load(":cc_toolchain_config.bzl", "cc_toolchain_config") + +licenses(["restricted"]) + +package(default_visibility = ["//visibility:public"]) + +toolchain( + name = "toolchain-linux-x86_64", + exec_compatible_with = [ + "@bazel_tools//platforms:linux", + "@bazel_tools//platforms:x86_64", + ], + target_compatible_with = [ + "@bazel_tools//platforms:linux", + "@bazel_tools//platforms:x86_64", + ], + toolchain = ":cc-compiler-local", + toolchain_type = "@bazel_tools//tools/cpp:toolchain_type", +) + +cc_toolchain_suite( + name = "toolchain", + toolchains = { + "local|compiler": ":cc-compiler-local", + "darwin|compiler": ":cc-compiler-darwin", + "k8": ":cc-compiler-local", + "darwin": ":cc-compiler-darwin", + }, +) + +cc_toolchain( + name = "cc-compiler-local", + all_files = ":crosstool_wrapper_driver_is_not_gcc", + compiler_files = ":empty", + dwp_files = ":empty", + linker_files = ":crosstool_wrapper_driver_is_not_gcc", + objcopy_files = ":empty", + strip_files = ":empty", + # To support linker flags that need to go to the start of command line + # we need the toolchain to support parameter files. Parameter files are + # last on the command line and contain all shared libraries to link, so all + # regular options will be left of them. + supports_param_files = 1, + toolchain_config = ":cc-compiler-local-config", + toolchain_identifier = "local_linux", +) + +cc_toolchain_config( + name = "cc-compiler-local-config", + builtin_include_directories = [ + "/dt7/usr/include/c++/7", + "/dt7/usr/include/c++/7/x86_64-pc-linux-gnu", + "/dt7/usr/include/c++/7/backward", + "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include", + "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include-fixed", + "/dt7/usr/include", + "/usr/local/cuda-10.0/targets/x86_64-linux/include", + "/usr/local/cuda-10.0/include", + "/usr/local/cuda-10.0/extras/CUPTI/include", + "/usr/include", + ], + cpu = "local", + extra_no_canonical_prefixes_flags = ["-fno-canonical-system-headers"], + host_compiler_path = "clang/bin/crosstool_wrapper_driver_is_not_gcc", + host_compiler_prefix = "/usr/bin", + host_compiler_warnings = [], + host_unfiltered_compile_flags = [], + linker_bin_path = "/usr/bin", +) + +cc_toolchain( + name = "cc-compiler-darwin", + all_files = ":crosstool_wrapper_driver_is_not_gcc", + compiler_files = ":empty", + dwp_files = ":empty", + linker_files = ":crosstool_wrapper_driver_is_not_gcc", + objcopy_files = ":empty", + strip_files = ":empty", + supports_param_files = 0, + toolchain_config = ":cc-compiler-local-darwin", + toolchain_identifier = "local_darwin", +) + +cc_toolchain_config( + name = "cc-compiler-local-darwin", + builtin_include_directories = [ + "/dt7/usr/include/c++/7", + "/dt7/usr/include/c++/7/x86_64-pc-linux-gnu", + "/dt7/usr/include/c++/7/backward", + "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include", + "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include-fixed", + "/dt7/usr/include", + "/usr/include", + ], + cpu = "darwin", + extra_no_canonical_prefixes_flags = ["-fno-canonical-system-headers"], + host_compiler_path = "clang/bin/crosstool_wrapper_driver_is_not_gcc", + host_compiler_prefix = "/usr/bin", + host_compiler_warnings = [], + host_unfiltered_compile_flags = [], + linker_bin_path = "/usr/bin", +) + +filegroup( + name = "empty", + srcs = [], +) + +filegroup( + name = "crosstool_wrapper_driver_is_not_gcc", + srcs = ["clang/bin/crosstool_wrapper_driver_is_not_gcc"], +) diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl 
b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl new file mode 100755 index 0000000000..ba002b4543 --- /dev/null +++ b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl @@ -0,0 +1,1493 @@ +"""cc_toolchain_config rule for configuring CUDA toolchains on Linux, Mac, and Windows.""" + +load( + "@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl", + "action_config", + "env_entry", + "env_set", + "feature", + "feature_set", + "flag_group", + "flag_set", + "tool", + "tool_path", + "variable_with_value", +) +load( + "@bazel_tools//tools/build_defs/cc:action_names.bzl", + "ASSEMBLE_ACTION_NAME", + "CC_FLAGS_MAKE_VARIABLE_ACTION_NAME", + "CLIF_MATCH_ACTION_NAME", + "CPP_COMPILE_ACTION_NAME", + "CPP_HEADER_PARSING_ACTION_NAME", + "CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME", + "CPP_LINK_EXECUTABLE_ACTION_NAME", + "CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME", + "CPP_LINK_STATIC_LIBRARY_ACTION_NAME", + "CPP_MODULE_CODEGEN_ACTION_NAME", + "CPP_MODULE_COMPILE_ACTION_NAME", + "C_COMPILE_ACTION_NAME", + "LINKSTAMP_COMPILE_ACTION_NAME", + "LTO_BACKEND_ACTION_NAME", + "LTO_INDEXING_ACTION_NAME", + "OBJCPP_COMPILE_ACTION_NAME", + "OBJCPP_EXECUTABLE_ACTION_NAME", + "OBJC_ARCHIVE_ACTION_NAME", + "OBJC_COMPILE_ACTION_NAME", + "OBJC_EXECUTABLE_ACTION_NAME", + "OBJC_FULLY_LINK_ACTION_NAME", + "PREPROCESS_ASSEMBLE_ACTION_NAME", + "STRIP_ACTION_NAME", +) + +ACTION_NAMES = struct( + assemble = ASSEMBLE_ACTION_NAME, + c_compile = C_COMPILE_ACTION_NAME, + cc_flags_make_variable = CC_FLAGS_MAKE_VARIABLE_ACTION_NAME, + clif_match = CLIF_MATCH_ACTION_NAME, + cpp_compile = CPP_COMPILE_ACTION_NAME, + cpp_header_parsing = CPP_HEADER_PARSING_ACTION_NAME, + cpp_link_dynamic_library = CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME, + cpp_link_executable = CPP_LINK_EXECUTABLE_ACTION_NAME, + cpp_link_nodeps_dynamic_library = CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME, + cpp_link_static_library = CPP_LINK_STATIC_LIBRARY_ACTION_NAME, + cpp_module_codegen = CPP_MODULE_CODEGEN_ACTION_NAME, + cpp_module_compile = CPP_MODULE_COMPILE_ACTION_NAME, + ld_embed_data = "ld_embed_data", + linkstamp_compile = LINKSTAMP_COMPILE_ACTION_NAME, + lto_backend = LTO_BACKEND_ACTION_NAME, + lto_indexing = LTO_INDEXING_ACTION_NAME, + objc_archive = OBJC_ARCHIVE_ACTION_NAME, + objc_compile = OBJC_COMPILE_ACTION_NAME, + objc_executable = OBJC_EXECUTABLE_ACTION_NAME, + objc_fully_link = OBJC_FULLY_LINK_ACTION_NAME, + objcopy_embed_data = "objcopy_embed_data", + objcpp_compile = OBJCPP_COMPILE_ACTION_NAME, + objcpp_executable = OBJCPP_EXECUTABLE_ACTION_NAME, + preprocess_assemble = PREPROCESS_ASSEMBLE_ACTION_NAME, + strip = STRIP_ACTION_NAME, +) + +def _impl(ctx): + if (ctx.attr.cpu == "darwin"): + toolchain_identifier = "local_darwin" + elif (ctx.attr.cpu == "local"): + toolchain_identifier = "local_linux" + elif (ctx.attr.cpu == "x64_windows"): + toolchain_identifier = "local_windows" + else: + fail("Unreachable") + + host_system_name = "local" + + target_system_name = "local" + + if (ctx.attr.cpu == "darwin"): + target_cpu = "darwin" + elif (ctx.attr.cpu == "local"): + target_cpu = "local" + elif (ctx.attr.cpu == "x64_windows"): + target_cpu = "x64_windows" + else: + fail("Unreachable") + + if (ctx.attr.cpu == "local"): + target_libc = "local" + elif (ctx.attr.cpu == "darwin"): + target_libc = "macosx" + elif (ctx.attr.cpu == "x64_windows"): + target_libc = "msvcrt" + else: + fail("Unreachable") + + if (ctx.attr.cpu == "darwin" or + ctx.attr.cpu == "local"): + compiler = "compiler" + elif (ctx.attr.cpu == 
"x64_windows"): + compiler = "msvc-cl" + else: + fail("Unreachable") + + abi_version = "local" + + abi_libc_version = "local" + + cc_target_os = None + + builtin_sysroot = None + + all_link_actions = [ + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ] + + cpp_link_dynamic_library_action = action_config( + action_name = ACTION_NAMES.cpp_link_dynamic_library, + implies = [ + "nologo", + "shared_flag", + "linkstamps", + "output_execpath_flags", + "input_param_flags", + "user_link_flags", + "linker_subsystem_flag", + "linker_param_file", + "msvc_env", + "no_stripping", + "has_configured_linker_path", + "def_file", + ], + tools = [tool(path = ctx.attr.msvc_link_path)], + ) + + cpp_link_nodeps_dynamic_library_action = action_config( + action_name = ACTION_NAMES.cpp_link_nodeps_dynamic_library, + implies = [ + "nologo", + "shared_flag", + "linkstamps", + "output_execpath_flags", + "input_param_flags", + "user_link_flags", + "linker_subsystem_flag", + "linker_param_file", + "msvc_env", + "no_stripping", + "has_configured_linker_path", + "def_file", + ], + tools = [tool(path = ctx.attr.msvc_link_path)], + ) + + cpp_link_static_library_action = action_config( + action_name = ACTION_NAMES.cpp_link_static_library, + implies = [ + "nologo", + "archiver_flags", + "input_param_flags", + "linker_param_file", + "msvc_env", + ], + tools = [tool(path = ctx.attr.msvc_lib_path)], + ) + + assemble_action = action_config( + action_name = ACTION_NAMES.assemble, + implies = [ + "compiler_input_flags", + "compiler_output_flags", + "nologo", + "msvc_env", + "sysroot", + ], + tools = [tool(path = ctx.attr.msvc_ml_path)], + ) + + preprocess_assemble_action = action_config( + action_name = ACTION_NAMES.preprocess_assemble, + implies = [ + "compiler_input_flags", + "compiler_output_flags", + "nologo", + "msvc_env", + "sysroot", + ], + tools = [tool(path = ctx.attr.msvc_ml_path)], + ) + + c_compile_action = action_config( + action_name = ACTION_NAMES.c_compile, + implies = [ + "compiler_input_flags", + "compiler_output_flags", + "nologo", + "msvc_env", + "parse_showincludes", + "user_compile_flags", + "sysroot", + "unfiltered_compile_flags", + ], + tools = [tool(path = ctx.attr.msvc_cl_path)], + ) + + cpp_compile_action = action_config( + action_name = ACTION_NAMES.cpp_compile, + implies = [ + "compiler_input_flags", + "compiler_output_flags", + "nologo", + "msvc_env", + "parse_showincludes", + "user_compile_flags", + "sysroot", + "unfiltered_compile_flags", + ], + tools = [tool(path = ctx.attr.msvc_cl_path)], + ) + + cpp_link_executable_action = action_config( + action_name = ACTION_NAMES.cpp_link_executable, + implies = [ + "nologo", + "linkstamps", + "output_execpath_flags", + "input_param_flags", + "user_link_flags", + "linker_subsystem_flag", + "linker_param_file", + "msvc_env", + "no_stripping", + ], + tools = [tool(path = ctx.attr.msvc_link_path)], + ) + + if (ctx.attr.cpu == "darwin" or + ctx.attr.cpu == "local"): + action_configs = [] + elif (ctx.attr.cpu == "x64_windows"): + action_configs = [ + assemble_action, + preprocess_assemble_action, + c_compile_action, + cpp_compile_action, + cpp_link_executable_action, + cpp_link_dynamic_library_action, + cpp_link_nodeps_dynamic_library_action, + cpp_link_static_library_action, + ] + else: + fail("Unreachable") + + no_windows_export_all_symbols_feature = feature(name = "no_windows_export_all_symbols") + + pic_feature = feature( + name = "pic", + enabled = True, + flag_sets = [ + flag_set( + 
actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group(flags = ["-fPIC"], expand_if_available = "pic"), + flag_group( + flags = ["-fPIE"], + expand_if_not_available = "pic", + ), + ], + ), + ], + ) + + preprocessor_defines_feature = feature( + name = "preprocessor_defines", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ], + flag_groups = [ + flag_group( + flags = ["/D%{preprocessor_defines}"], + iterate_over = "preprocessor_defines", + ), + ], + ), + ], + ) + + generate_pdb_file_feature = feature( + name = "generate_pdb_file", + requires = [ + feature_set(features = ["dbg"]), + feature_set(features = ["fastbuild"]), + ], + ) + + linkstamps_feature = feature( + name = "linkstamps", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [ + flag_group( + flags = ["%{linkstamp_paths}"], + iterate_over = "linkstamp_paths", + expand_if_available = "linkstamp_paths", + ), + ], + ), + ], + ) + + unfiltered_compile_flags_feature = feature( + name = "unfiltered_compile_flags", + flag_sets = ([ + flag_set( + actions = [ + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ], + flag_groups = [ + flag_group( + flags = ctx.attr.host_unfiltered_compile_flags, + ), + ], + ), + ] if ctx.attr.host_unfiltered_compile_flags else []), + ) + + determinism_feature = feature( + name = "determinism", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group( + flags = [ + "-Wno-builtin-macro-redefined", + "-D__DATE__=\"redacted\"", + "-D__TIMESTAMP__=\"redacted\"", + "-D__TIME__=\"redacted\"", + ], + ), + ], + ), + ], + ) + + nologo_feature = feature( + name = "nologo", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ACTION_NAMES.cpp_link_static_library, + ], + flag_groups = [flag_group(flags = ["/nologo"])], + ), + ], + ) + + supports_pic_feature = feature(name = "supports_pic", enabled = True) + + output_execpath_flags_feature = feature( + name = "output_execpath_flags", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [ + flag_group( + flags = ["/OUT:%{output_execpath}"], + expand_if_available = "output_execpath", + ), + ], + ), + ], + ) + + default_link_flags_feature = feature( + name = "default_link_flags", + enabled = True, + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/MACHINE:X64"])], + ), + ], + ) + + if (ctx.attr.cpu == "local"): + hardening_feature = feature( + name = "hardening", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group( + flags = [ + "-U_FORTIFY_SOURCE", + "-D_FORTIFY_SOURCE=1", + "-fstack-protector", + ], + ), + ], + ), + flag_set( + actions = [ + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ], + flag_groups = 
[flag_group(flags = ["-Wl,-z,relro,-z,now"])], + ), + flag_set( + actions = [ACTION_NAMES.cpp_link_executable], + flag_groups = [flag_group(flags = ["-pie", "-Wl,-z,relro,-z,now"])], + ), + ], + ) + elif (ctx.attr.cpu == "darwin"): + hardening_feature = feature( + name = "hardening", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group( + flags = [ + "-U_FORTIFY_SOURCE", + "-D_FORTIFY_SOURCE=1", + "-fstack-protector", + ], + ), + ], + ), + flag_set( + actions = [ACTION_NAMES.cpp_link_executable], + flag_groups = [flag_group(flags = ["-pie"])], + ), + ], + ) + else: + hardening_feature = None + + supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True) + + targets_windows_feature = feature( + name = "targets_windows", + enabled = True, + implies = ["copy_dynamic_libraries_to_binary"], + ) + + msvc_env_feature = feature( + name = "msvc_env", + env_sets = [ + env_set( + actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ACTION_NAMES.cpp_link_static_library, + ], + env_entries = [ + env_entry(key = "PATH", value = ctx.attr.msvc_env_path), + env_entry( + key = "INCLUDE", + value = ctx.attr.msvc_env_include, + ), + env_entry(key = "LIB", value = ctx.attr.msvc_env_lib), + env_entry(key = "TMP", value = ctx.attr.msvc_env_tmp), + env_entry(key = "TEMP", value = ctx.attr.msvc_env_tmp), + ], + ), + ], + ) + + linker_subsystem_flag_feature = feature( + name = "linker_subsystem_flag", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/SUBSYSTEM:CONSOLE"])], + ), + ], + ) + + dynamic_link_msvcrt_no_debug_feature = feature( + name = "dynamic_link_msvcrt_no_debug", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/MD"])], + ), + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrt.lib"])], + ), + ], + requires = [ + feature_set(features = ["fastbuild"]), + feature_set(features = ["opt"]), + ], + ) + + warnings_feature = feature( + name = "warnings", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group( + flags = ["-Wall"] + ctx.attr.host_compiler_warnings, + ), + ], + ), + ], + ) + + dynamic_link_msvcrt_debug_feature = feature( + name = "dynamic_link_msvcrt_debug", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/MDd"])], + ), + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrtd.lib"])], + ), + ], + requires = [feature_set(features = ["dbg"])], + ) + + compiler_output_flags_feature = feature( + name = "compiler_output_flags", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.assemble], + flag_groups = [ + flag_group( + flag_groups = [ + flag_group( + flags = ["/Fo%{output_file}", "/Zi"], + expand_if_not_available = "output_preprocess_file", + ), + ], + expand_if_available = "output_file", + expand_if_not_available = "output_assembly_file", + ), + ], + ), + flag_set( + actions = [ + ACTION_NAMES.preprocess_assemble, + 
ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ], + flag_groups = [ + flag_group( + flag_groups = [ + flag_group( + flags = ["/Fo%{output_file}"], + expand_if_not_available = "output_preprocess_file", + ), + ], + expand_if_available = "output_file", + expand_if_not_available = "output_assembly_file", + ), + flag_group( + flag_groups = [ + flag_group( + flags = ["/Fa%{output_file}"], + expand_if_available = "output_assembly_file", + ), + ], + expand_if_available = "output_file", + ), + flag_group( + flag_groups = [ + flag_group( + flags = ["/P", "/Fi%{output_file}"], + expand_if_available = "output_preprocess_file", + ), + ], + expand_if_available = "output_file", + ), + ], + ), + ], + ) + + default_compile_flags_feature = feature( + name = "default_compile_flags", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ACTION_NAMES.clif_match, + ], + flag_groups = [ + flag_group( + flags = [ + "/DCOMPILER_MSVC", + "/DNOMINMAX", + "/D_WIN32_WINNT=0x0600", + "/D_CRT_SECURE_NO_DEPRECATE", + "/D_CRT_SECURE_NO_WARNINGS", + "/D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS", + "/bigobj", + "/Zm500", + "/J", + "/Gy", + "/GF", + "/EHsc", + "/wd4351", + "/wd4291", + "/wd4250", + "/wd4996", + ], + ), + ], + ), + ], + ) + + static_link_msvcrt_debug_feature = feature( + name = "static_link_msvcrt_debug", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/MTd"])], + ), + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmtd.lib"])], + ), + ], + requires = [feature_set(features = ["dbg"])], + ) + + static_link_msvcrt_feature = feature(name = "static_link_msvcrt") + + if (ctx.attr.cpu == "darwin" or + ctx.attr.cpu == "local"): + dbg_feature = feature( + name = "dbg", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["-g"])], + ), + ], + implies = ["common"], + ) + elif (ctx.attr.cpu == "x64_windows"): + dbg_feature = feature( + name = "dbg", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/Od", "/Z7", "/DDEBUG"])], + ), + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/DEBUG:FULL", "/INCREMENTAL:NO"])], + ), + ], + implies = ["generate_pdb_file"], + ) + else: + dbg_feature = None + + undefined_dynamic_feature = feature( + name = "undefined-dynamic", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ACTION_NAMES.cpp_link_executable, + ], + flag_groups = [flag_group(flags = ["-undefined", "dynamic_lookup"])], + ), + ], + ) + + parse_showincludes_feature = feature( + name = "parse_showincludes", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_header_parsing, + ], + flag_groups = [flag_group(flags = ["/showIncludes"])], + ), + ], + ) + + linker_param_file_feature = feature( + name = 
"linker_param_file", + flag_sets = [ + flag_set( + actions = all_link_actions + + [ACTION_NAMES.cpp_link_static_library], + flag_groups = [ + flag_group( + flags = ["@%{linker_param_file}"], + expand_if_available = "linker_param_file", + ), + ], + ), + ], + ) + + static_link_msvcrt_no_debug_feature = feature( + name = "static_link_msvcrt_no_debug", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/MT"])], + ), + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmt.lib"])], + ), + ], + requires = [ + feature_set(features = ["fastbuild"]), + feature_set(features = ["opt"]), + ], + ) + + supports_interface_shared_libraries_feature = feature( + name = "supports_interface_shared_libraries", + enabled = True, + ) + + disable_assertions_feature = feature( + name = "disable-assertions", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["-DNDEBUG"])], + ), + ], + ) + + if (ctx.attr.cpu == "x64_windows"): + fastbuild_feature = feature( + name = "fastbuild", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/Od", "/Z7", "/DDEBUG"])], + ), + flag_set( + actions = all_link_actions, + flag_groups = [ + flag_group(flags = ["/DEBUG:FASTLINK", "/INCREMENTAL:NO"]), + ], + ), + ], + implies = ["generate_pdb_file"], + ) + elif (ctx.attr.cpu == "darwin" or + ctx.attr.cpu == "local"): + fastbuild_feature = feature(name = "fastbuild", implies = ["common"]) + else: + fastbuild_feature = None + + user_compile_flags_feature = feature( + name = "user_compile_flags", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ], + flag_groups = [ + flag_group( + flags = ["%{user_compile_flags}"], + iterate_over = "user_compile_flags", + expand_if_available = "user_compile_flags", + ), + ], + ), + ], + ) + + compiler_input_flags_feature = feature( + name = "compiler_input_flags", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ], + flag_groups = [ + flag_group( + flags = ["/c", "%{source_file}"], + expand_if_available = "source_file", + ), + ], + ), + ], + ) + + no_legacy_features_feature = feature(name = "no_legacy_features") + + archiver_flags_feature = feature( + name = "archiver_flags", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.cpp_link_static_library], + flag_groups = [ + flag_group( + flags = ["/OUT:%{output_execpath}"], + expand_if_available = "output_execpath", + ), + ], + ), + ], + ) + + redirector_feature = feature( + name = "redirector", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ], + flag_groups = [ + flag_group( + flags = [ + "-B", + "external/local_config_cuda/crosstool/windows/msvc_wrapper_for_nvcc.py", + ], + ), + ], + ), + ], + ) + + linker_bin_path_feature = feature( + name = 
"linker-bin-path", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["-B" + ctx.attr.linker_bin_path])], + ), + ], + ) + + if (ctx.attr.cpu == "local"): + opt_feature = feature( + name = "opt", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group( + flags = ["-g0", "-O2", "-ffunction-sections", "-fdata-sections"], + ), + ], + ), + flag_set( + actions = [ + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ACTION_NAMES.cpp_link_executable, + ], + flag_groups = [flag_group(flags = ["-Wl,--gc-sections"])], + ), + ], + implies = ["common", "disable-assertions"], + ) + elif (ctx.attr.cpu == "darwin"): + opt_feature = feature( + name = "opt", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [ + flag_group( + flags = ["-g0", "-O2", "-ffunction-sections", "-fdata-sections"], + ), + ], + ), + ], + implies = ["common", "disable-assertions"], + ) + elif (ctx.attr.cpu == "x64_windows"): + opt_feature = feature( + name = "opt", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["/O2", "/DNDEBUG"])], + ), + ], + ) + else: + opt_feature = None + + include_paths_feature = feature( + name = "include_paths", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ], + flag_groups = [ + flag_group( + flags = ["/I%{quote_include_paths}"], + iterate_over = "quote_include_paths", + ), + flag_group( + flags = ["/I%{include_paths}"], + iterate_over = "include_paths", + ), + flag_group( + flags = ["/I%{system_include_paths}"], + iterate_over = "system_include_paths", + ), + ], + ), + ], + ) + + shared_flag_feature = feature( + name = "shared_flag", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ], + flag_groups = [flag_group(flags = ["/DLL"])], + ), + ], + ) + + windows_export_all_symbols_feature = feature(name = "windows_export_all_symbols") + + frame_pointer_feature = feature( + name = "frame-pointer", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["-fno-omit-frame-pointer"])], + ), + ], + ) + + build_id_feature = feature( + name = "build-id", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [ + flag_group( + flags = ["-Wl,--build-id=md5", "-Wl,--hash-style=gnu"], + ), + ], + ), + ], + ) + + sysroot_feature = feature( + name = "sysroot", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ], + flag_groups = [ + flag_group( + flags = ["--sysroot=%{sysroot}"], + iterate_over = "sysroot", + expand_if_available = "sysroot", + ), + ], + ), + ], + ) + + def_file_feature = feature( + name = "def_file", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [ + flag_group( + flags = ["/DEF:%{def_file_path}", 
"/ignore:4070"], + expand_if_available = "def_file_path", + ), + ], + ), + ], + ) + + if (ctx.attr.cpu == "darwin"): + stdlib_feature = feature( + name = "stdlib", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["-lc++"])], + ), + ], + ) + elif (ctx.attr.cpu == "local"): + stdlib_feature = feature( + name = "stdlib", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [flag_group(flags = ["-lstdc++"])], + ), + ], + ) + else: + stdlib_feature = None + + no_stripping_feature = feature(name = "no_stripping") + + alwayslink_feature = feature( + name = "alwayslink", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ACTION_NAMES.cpp_link_executable, + ], + flag_groups = [flag_group(flags = ["-Wl,-no-as-needed"])], + ), + ], + ) + + input_param_flags_feature = feature( + name = "input_param_flags", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ], + flag_groups = [ + flag_group( + flags = ["/IMPLIB:%{interface_library_output_path}"], + expand_if_available = "interface_library_output_path", + ), + ], + ), + flag_set( + actions = all_link_actions + + [ACTION_NAMES.cpp_link_static_library], + flag_groups = [ + flag_group( + iterate_over = "libraries_to_link", + flag_groups = [ + flag_group( + iterate_over = "libraries_to_link.object_files", + flag_groups = [flag_group(flags = ["%{libraries_to_link.object_files}"])], + expand_if_equal = variable_with_value( + name = "libraries_to_link.type", + value = "object_file_group", + ), + ), + flag_group( + flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])], + expand_if_equal = variable_with_value( + name = "libraries_to_link.type", + value = "object_file", + ), + ), + flag_group( + flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])], + expand_if_equal = variable_with_value( + name = "libraries_to_link.type", + value = "interface_library", + ), + ), + flag_group( + flag_groups = [ + flag_group( + flags = ["%{libraries_to_link.name}"], + expand_if_false = "libraries_to_link.is_whole_archive", + ), + flag_group( + flags = ["/WHOLEARCHIVE:%{libraries_to_link.name}"], + expand_if_true = "libraries_to_link.is_whole_archive", + ), + ], + expand_if_equal = variable_with_value( + name = "libraries_to_link.type", + value = "static_library", + ), + ), + ], + expand_if_available = "libraries_to_link", + ), + ], + ), + ], + ) + + if (ctx.attr.cpu == "local"): + no_canonical_prefixes_feature = feature( + name = "no-canonical-prefixes", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ], + flag_groups = [ + flag_group( + flags = [ + "-no-canonical-prefixes", + ] + ctx.attr.extra_no_canonical_prefixes_flags, + ), + ], + ), + ], + ) + elif (ctx.attr.cpu == "darwin"): + no_canonical_prefixes_feature = feature( + name = "no-canonical-prefixes", + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ], + flag_groups = [flag_group(flags = ["-no-canonical-prefixes"])], + ), + ], + ) + else: + no_canonical_prefixes_feature = None + + has_configured_linker_path_feature = feature(name = 
"has_configured_linker_path") + + copy_dynamic_libraries_to_binary_feature = feature(name = "copy_dynamic_libraries_to_binary") + + user_link_flags_feature = feature( + name = "user_link_flags", + flag_sets = [ + flag_set( + actions = all_link_actions, + flag_groups = [ + flag_group( + flags = ["%{user_link_flags}"], + iterate_over = "user_link_flags", + expand_if_available = "user_link_flags", + ), + ], + ), + ], + ) + + cpp11_feature = feature( + name = "c++11", + flag_sets = [ + flag_set( + actions = [ACTION_NAMES.cpp_compile], + flag_groups = [flag_group(flags = ["-std=c++11"])], + ), + ], + ) + + if (ctx.attr.cpu == "local"): + common_feature = feature( + name = "common", + implies = [ + "stdlib", + "c++11", + "determinism", + "alwayslink", + "hardening", + "warnings", + "frame-pointer", + "build-id", + "no-canonical-prefixes", + "linker-bin-path", + ], + ) + elif (ctx.attr.cpu == "darwin"): + common_feature = feature( + name = "common", + implies = [ + "stdlib", + "c++11", + "determinism", + "hardening", + "warnings", + "frame-pointer", + "no-canonical-prefixes", + "linker-bin-path", + "undefined-dynamic", + ], + ) + else: + common_feature = None + + if (ctx.attr.cpu == "local"): + features = [ + cpp11_feature, + stdlib_feature, + determinism_feature, + alwayslink_feature, + pic_feature, + hardening_feature, + warnings_feature, + frame_pointer_feature, + build_id_feature, + no_canonical_prefixes_feature, + disable_assertions_feature, + linker_bin_path_feature, + common_feature, + opt_feature, + fastbuild_feature, + dbg_feature, + supports_dynamic_linker_feature, + supports_pic_feature, + ] + elif (ctx.attr.cpu == "darwin"): + features = [ + cpp11_feature, + stdlib_feature, + determinism_feature, + pic_feature, + hardening_feature, + warnings_feature, + frame_pointer_feature, + no_canonical_prefixes_feature, + disable_assertions_feature, + linker_bin_path_feature, + undefined_dynamic_feature, + common_feature, + opt_feature, + fastbuild_feature, + dbg_feature, + supports_dynamic_linker_feature, + supports_pic_feature, + ] + elif (ctx.attr.cpu == "x64_windows"): + features = [ + no_legacy_features_feature, + redirector_feature, + nologo_feature, + has_configured_linker_path_feature, + no_stripping_feature, + targets_windows_feature, + copy_dynamic_libraries_to_binary_feature, + default_compile_flags_feature, + msvc_env_feature, + include_paths_feature, + preprocessor_defines_feature, + parse_showincludes_feature, + generate_pdb_file_feature, + shared_flag_feature, + linkstamps_feature, + output_execpath_flags_feature, + archiver_flags_feature, + input_param_flags_feature, + linker_subsystem_flag_feature, + user_link_flags_feature, + default_link_flags_feature, + linker_param_file_feature, + static_link_msvcrt_feature, + static_link_msvcrt_no_debug_feature, + dynamic_link_msvcrt_no_debug_feature, + static_link_msvcrt_debug_feature, + dynamic_link_msvcrt_debug_feature, + dbg_feature, + fastbuild_feature, + opt_feature, + user_compile_flags_feature, + sysroot_feature, + unfiltered_compile_flags_feature, + compiler_output_flags_feature, + compiler_input_flags_feature, + def_file_feature, + windows_export_all_symbols_feature, + no_windows_export_all_symbols_feature, + supports_dynamic_linker_feature, + supports_interface_shared_libraries_feature, + ] + else: + fail("Unreachable") + + cxx_builtin_include_directories = ctx.attr.builtin_include_directories + + if (ctx.attr.cpu == "x64_windows"): + tool_paths = [ + tool_path(name = "ar", path = ctx.attr.msvc_lib_path), + tool_path(name = "ml", 
path = ctx.attr.msvc_ml_path), + tool_path(name = "cpp", path = ctx.attr.msvc_cl_path), + tool_path(name = "gcc", path = ctx.attr.msvc_cl_path), + tool_path(name = "gcov", path = "wrapper/bin/msvc_nop.bat"), + tool_path(name = "ld", path = ctx.attr.msvc_link_path), + tool_path(name = "nm", path = "wrapper/bin/msvc_nop.bat"), + tool_path( + name = "objcopy", + path = "wrapper/bin/msvc_nop.bat", + ), + tool_path( + name = "objdump", + path = "wrapper/bin/msvc_nop.bat", + ), + tool_path( + name = "strip", + path = "wrapper/bin/msvc_nop.bat", + ), + ] + elif (ctx.attr.cpu == "local"): + tool_paths = [ + tool_path(name = "gcc", path = ctx.attr.host_compiler_path), + tool_path(name = "ar", path = ctx.attr.host_compiler_prefix + "/ar"), + tool_path(name = "compat-ld", path = ctx.attr.host_compiler_prefix + "/ld"), + tool_path(name = "cpp", path = ctx.attr.host_compiler_prefix + "/cpp"), + tool_path(name = "dwp", path = ctx.attr.host_compiler_prefix + "/dwp"), + tool_path(name = "gcov", path = ctx.attr.host_compiler_prefix + "/gcov"), + tool_path(name = "ld", path = ctx.attr.host_compiler_prefix + "/ld"), + tool_path(name = "nm", path = ctx.attr.host_compiler_prefix + "/nm"), + tool_path(name = "objcopy", path = ctx.attr.host_compiler_prefix + "/objcopy"), + tool_path(name = "objdump", path = ctx.attr.host_compiler_prefix + "/objdump"), + tool_path(name = "strip", path = ctx.attr.host_compiler_prefix + "/strip"), + ] + elif (ctx.attr.cpu == "darwin"): + tool_paths = [ + tool_path(name = "gcc", path = ctx.attr.host_compiler_path), + tool_path(name = "ar", path = ctx.attr.host_compiler_prefix + "/libtool"), + tool_path(name = "compat-ld", path = ctx.attr.host_compiler_prefix + "/ld"), + tool_path(name = "cpp", path = ctx.attr.host_compiler_prefix + "/cpp"), + tool_path(name = "dwp", path = ctx.attr.host_compiler_prefix + "/dwp"), + tool_path(name = "gcov", path = ctx.attr.host_compiler_prefix + "/gcov"), + tool_path(name = "ld", path = ctx.attr.host_compiler_prefix + "/ld"), + tool_path(name = "nm", path = ctx.attr.host_compiler_prefix + "/nm"), + tool_path(name = "objcopy", path = ctx.attr.host_compiler_prefix + "/objcopy"), + tool_path(name = "objdump", path = ctx.attr.host_compiler_prefix + "/objdump"), + tool_path(name = "strip", path = ctx.attr.host_compiler_prefix + "/strip"), + ] + else: + fail("Unreachable") + + out = ctx.actions.declare_file(ctx.label.name) + ctx.actions.write(out, "Fake executable") + return [ + cc_common.create_cc_toolchain_config_info( + ctx = ctx, + features = features, + action_configs = action_configs, + artifact_name_patterns = [], + cxx_builtin_include_directories = cxx_builtin_include_directories, + toolchain_identifier = toolchain_identifier, + host_system_name = host_system_name, + target_system_name = target_system_name, + target_cpu = target_cpu, + target_libc = target_libc, + compiler = compiler, + abi_version = abi_version, + abi_libc_version = abi_libc_version, + tool_paths = tool_paths, + make_variables = [], + builtin_sysroot = builtin_sysroot, + cc_target_os = cc_target_os, + ), + DefaultInfo( + executable = out, + ), + ] + +cc_toolchain_config = rule( + attrs = { + "cpu": attr.string( + mandatory = True, + values = [ + "darwin", + "local", + "x64_windows", + ], + ), + "builtin_include_directories": attr.string_list(), + "extra_no_canonical_prefixes_flags": attr.string_list(), + "host_compiler_path": attr.string(), + "host_compiler_prefix": attr.string(), + "host_compiler_warnings": attr.string_list(), + "host_unfiltered_compile_flags": 
attr.string_list(), + "linker_bin_path": attr.string(), + "msvc_cl_path": attr.string(default = "msvc_not_used"), + "msvc_env_include": attr.string(default = "msvc_not_used"), + "msvc_env_lib": attr.string(default = "msvc_not_used"), + "msvc_env_path": attr.string(default = "msvc_not_used"), + "msvc_env_tmp": attr.string(default = "msvc_not_used"), + "msvc_lib_path": attr.string(default = "msvc_not_used"), + "msvc_link_path": attr.string(default = "msvc_not_used"), + "msvc_ml_path": attr.string(default = "msvc_not_used"), + }, + executable = True, + provides = [CcToolchainConfigInfo], + implementation = _impl, +) diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc new file mode 100755 index 0000000000..5c0abcdcd8 --- /dev/null +++ b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc @@ -0,0 +1,268 @@ +#!/usr/bin/env python +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Crosstool wrapper for compiling CUDA programs. + +SYNOPSIS: + crosstool_wrapper_is_not_gcc [options passed in by cc_library() + or cc_binary() rule] + +DESCRIPTION: + This script is expected to be called by the cc_library() or cc_binary() bazel + rules. When the option "-x cuda" is present in the list of arguments passed + to this script, it invokes the nvcc CUDA compiler. Most arguments are passed + as is as a string to --compiler-options of nvcc. When "-x cuda" is not + present, this wrapper invokes hybrid_driver_is_not_gcc with the input + arguments as is. +""" + +from __future__ import print_function + +__author__ = 'keveman@google.com (Manjunath Kudlur)' + +from argparse import ArgumentParser +import os +import subprocess +import re +import sys +import pipes + +# Template values set by cuda_autoconf. +CPU_COMPILER = ('/dt7/usr/bin/gcc') +GCC_HOST_COMPILER_PATH = ('/dt7/usr/bin/gcc') + +NVCC_PATH = '/usr/local/cuda-10.0/bin/nvcc' +PREFIX_DIR = os.path.dirname(GCC_HOST_COMPILER_PATH) +NVCC_VERSION = '10.0' + + +def Log(s): + print('gpus/crosstool: {0}'.format(s)) + + +def GetOptionValue(argv, option): + """Extract the list of values for option from the argv list. + + Args: + argv: A list of strings, possibly the argv passed to main(). + option: The option whose value to extract, without the leading '-'. + + Returns: + A list of values, either directly following the option, + (eg., -opt val1 val2) or values collected from multiple occurrences of + the option (eg., -opt val1 -opt val2). 
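+
+  Example (illustrative):
+    GetOptionValue(['-I', 'foo', '-I', 'bar', '-c', 'x.cc'], 'I')
+    returns ['foo', 'bar'].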
+ """ + + parser = ArgumentParser() + parser.add_argument('-' + option, nargs='*', action='append') + args, _ = parser.parse_known_args(argv) + if not args or not vars(args)[option]: + return [] + else: + return sum(vars(args)[option], []) + + +def GetHostCompilerOptions(argv): + """Collect the -isystem, -iquote, and --sysroot option values from argv. + + Args: + argv: A list of strings, possibly the argv passed to main(). + + Returns: + The string that can be used as the --compiler-options to nvcc. + """ + + parser = ArgumentParser() + parser.add_argument('-isystem', nargs='*', action='append') + parser.add_argument('-iquote', nargs='*', action='append') + parser.add_argument('--sysroot', nargs=1) + parser.add_argument('-g', nargs='*', action='append') + parser.add_argument('-fno-canonical-system-headers', action='store_true') + parser.add_argument('-no-canonical-prefixes', action='store_true') + + args, _ = parser.parse_known_args(argv) + + opts = '' + + if args.isystem: + opts += ' -isystem ' + ' -isystem '.join(sum(args.isystem, [])) + if args.iquote: + opts += ' -iquote ' + ' -iquote '.join(sum(args.iquote, [])) + if args.g: + opts += ' -g' + ' -g'.join(sum(args.g, [])) + if args.fno_canonical_system_headers: + opts += ' -fno-canonical-system-headers' + if args.no_canonical_prefixes: + opts += ' -no-canonical-prefixes' + if args.sysroot: + opts += ' --sysroot ' + args.sysroot[0] + + return opts + + +def _update_options(nvcc_options): + if NVCC_VERSION in ("7.0",): + return nvcc_options + + update_options = {"relaxed-constexpr": "expt-relaxed-constexpr"} + return [ + update_options[opt] if opt in update_options else opt + for opt in nvcc_options + ] + + +def GetNvccOptions(argv): + """Collect the -nvcc_options values from argv. + + Args: + argv: A list of strings, possibly the argv passed to main(). + + Returns: + The string that can be passed directly to nvcc. + """ + + parser = ArgumentParser() + parser.add_argument('-nvcc_options', nargs='*', action='append') + + args, _ = parser.parse_known_args(argv) + + if args.nvcc_options: + options = _update_options(sum(args.nvcc_options, [])) + return ' '.join(['--' + a for a in options]) + return '' + + +def InvokeNvcc(argv, log=False): + """Call nvcc with arguments assembled from argv. + + Args: + argv: A list of strings, possibly the argv passed to main(). + log: True if logging is requested. + + Returns: + The return value of calling os.system('nvcc ' + args) + """ + + host_compiler_options = GetHostCompilerOptions(argv) + nvcc_compiler_options = GetNvccOptions(argv) + opt_option = GetOptionValue(argv, 'O') + m_options = GetOptionValue(argv, 'm') + m_options = ''.join([' -m' + m for m in m_options if m in ['32', '64']]) + include_options = GetOptionValue(argv, 'I') + out_file = GetOptionValue(argv, 'o') + depfiles = GetOptionValue(argv, 'MF') + defines = GetOptionValue(argv, 'D') + defines = ''.join([' -D' + define for define in defines]) + undefines = GetOptionValue(argv, 'U') + undefines = ''.join([' -U' + define for define in undefines]) + std_options = GetOptionValue(argv, 'std') + # currently only c++11 is supported by Cuda 7.0 std argument + nvcc_allowed_std_options = ["c++11"] + std_options = ''.join([ + ' -std=' + define for define in std_options + if define in nvcc_allowed_std_options + ]) + + # The list of source files get passed after the -c option. I don't know of + # any other reliable way to just get the list of source files to be compiled. 
+ src_files = GetOptionValue(argv, 'c') + + # Pass -w through from host to nvcc, but don't do anything fancier with + # warnings-related flags, since they're not necessarily the same across + # compilers. + warning_options = ' -w' if '-w' in argv else '' + + if len(src_files) == 0: + return 1 + if len(out_file) != 1: + return 1 + + opt = (' -O2' if + (len(opt_option) > 0 and int(opt_option[0]) > 0) else ' -g -G') + + includes = (' -I ' + ' -I '.join(include_options) + if len(include_options) > 0 else '') + + # Unfortunately, there are other options that have -c prefix too. + # So allowing only those look like C/C++ files. + src_files = [ + f for f in src_files if re.search('\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f) + ] + srcs = ' '.join(src_files) + out = ' -o ' + out_file[0] + + supported_cuda_compute_capabilities = ["3.0", "6.0"] + nvccopts = '-D_FORCE_INLINES ' + for capability in supported_cuda_compute_capabilities: + capability = capability.replace('.', '') + nvccopts += r'-gencode=arch=compute_%s,\"code=sm_%s,compute_%s\" ' % ( + capability, capability, capability) + nvccopts += ' ' + nvcc_compiler_options + nvccopts += undefines + nvccopts += defines + nvccopts += std_options + nvccopts += m_options + nvccopts += warning_options + + if depfiles: + # Generate the dependency file + depfile = depfiles[0] + cmd = (NVCC_PATH + ' ' + nvccopts + ' --compiler-options "' + + host_compiler_options + '"' + ' --compiler-bindir=' + + GCC_HOST_COMPILER_PATH + ' -I .' + ' -x cu ' + opt + includes + + ' ' + srcs + ' -M -o ' + depfile) + if log: Log(cmd) + exit_status = os.system(cmd) + if exit_status != 0: + return exit_status + + cmd = (NVCC_PATH + ' ' + nvccopts + ' --compiler-options "' + + host_compiler_options + ' -fPIC"' + ' --compiler-bindir=' + + GCC_HOST_COMPILER_PATH + ' -I .' + ' -x cu ' + opt + includes + + ' -c ' + srcs + out) + + # TODO(zhengxq): for some reason, 'gcc' needs this help to find 'as'. + # Need to investigate and fix. + cmd = 'PATH=' + PREFIX_DIR + ':$PATH ' + cmd + if log: Log(cmd) + return os.system(cmd) + + +def main(): + parser = ArgumentParser() + parser.add_argument('-x', nargs=1) + parser.add_argument('--cuda_log', action='store_true') + args, leftover = parser.parse_known_args(sys.argv[1:]) + + if args.x and args.x[0] == 'cuda': + if args.cuda_log: Log('-x cuda') + leftover = [pipes.quote(s) for s in leftover] + if args.cuda_log: Log('using nvcc') + return InvokeNvcc(leftover, log=args.cuda_log) + + # Strip our flags before passing through to the CPU compiler for files which + # are not -x cuda. We can't just pass 'leftover' because it also strips -x. + # We not only want to pass -x to the CPU compiler, but also keep it in its + # relative location in the argv list (the compiler is actually sensitive to + # this). 
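+  # Note: only --cuda_log is stripped below; -x and all other flags are
+  # forwarded to the CPU compiler unchanged, in their original positions.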
+ cpu_compiler_flags = [ + flag for flag in sys.argv[1:] if not flag.startswith(('--cuda_log')) + ] + + return subprocess.call([CPU_COMPILER] + cpu_compiler_flags) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build_pip_pkg.sh b/build_pip_pkg.sh index 9ef2861aae..82e1c78754 100755 --- a/build_pip_pkg.sh +++ b/build_pip_pkg.sh @@ -44,6 +44,7 @@ function main() { cp ${PIP_FILE_PREFIX}setup.py "${TMPDIR}" cp ${PIP_FILE_PREFIX}MANIFEST.in "${TMPDIR}" cp ${PIP_FILE_PREFIX}LICENSE "${TMPDIR}" + touch ${TMPDIR}/stub.cc rsync -avm -L --exclude='*_test.py' ${PIP_FILE_PREFIX}tensorflow_addons "${TMPDIR}" pushd ${TMPDIR} diff --git a/configure.sh b/configure.sh index 0624a47f86..a23e051365 100755 --- a/configure.sh +++ b/configure.sh @@ -48,7 +48,7 @@ elif [[ ! -z "$1" ]]; then fi # Install python dependencies -read -r -p "Tensorflow will be upgraded to 2.0. Are You Sure? [y/n] " reply +read -r -p "Tensorflow 2.0 will be installed if it is not already. Are You Sure? [y/n] " reply case $reply in [yY]*) echo "Installing...";; * ) echo "Goodbye!"; exit;; @@ -70,26 +70,26 @@ TF_CFLAGS=( $(${PYTHON_VERSION} -c 'import tensorflow as tf; print(" ".join(tf.s TF_LFLAGS=( $(${PYTHON_VERSION} -c 'import tensorflow as tf; print(" ".join(tf.sysconfig.get_link_flags()))') ) TF_CXX11_ABI_FLAG=( $(${PYTHON_VERSION} -c 'import tensorflow as tf; print(tf.sysconfig.CXX11_ABI_FLAG)') ) -SHARED_LIBRARY_DIR=${TF_LFLAGS[0]:2} -SHARED_LIBRARY_NAME=$(generate_shared_lib_name ${TF_LFLAGS[1]}) +TF_SHARED_LIBRARY_DIR=${TF_LFLAGS[0]:2} +TF_SHARED_LIBRARY_NAME=$(generate_shared_lib_name ${TF_LFLAGS[1]}) write_action_env_to_bazelrc "TF_HEADER_DIR" ${TF_CFLAGS:2} -write_action_env_to_bazelrc "TF_SHARED_LIBRARY_DIR" ${SHARED_LIBRARY_DIR} -write_action_env_to_bazelrc "TF_SHARED_LIBRARY_NAME" ${SHARED_LIBRARY_NAME} +write_action_env_to_bazelrc "TF_SHARED_LIBRARY_DIR" ${TF_SHARED_LIBRARY_DIR} +write_action_env_to_bazelrc "TF_SHARED_LIBRARY_NAME" ${TF_SHARED_LIBRARY_NAME} write_action_env_to_bazelrc "TF_CXX11_ABI_FLAG" ${TF_CXX11_ABI_FLAG} + if [[ "$TF_NEED_CUDA" == "1" ]]; then + write_action_env_to_bazelrc "TF_NEED_CUDA" ${TF_NEED_CUDA} write_action_env_to_bazelrc "CUDNN_INSTALL_PATH" "/usr/lib/x86_64-linux-gnu" write_action_env_to_bazelrc "TF_CUDA_VERSION" "10.0" write_action_env_to_bazelrc "TF_CUDNN_VERSION" "7" write_action_env_to_bazelrc "CUDA_TOOLKIT_PATH" "${CUDA_HOME:=/usr/local/cuda}" - write_to_bazelrc "build --config=cuda" - write_to_bazelrc "test --config=cuda" + write_to_bazelrc "test --config=cuda" + write_to_bazelrc "build --config=cuda" + write_to_bazelrc "build --spawn_strategy=local" + write_to_bazelrc "build --strategy=Genrule=local" write_to_bazelrc "build:cuda --define=using_cuda=true --define=using_cuda_nvcc=true" - write_to_bazelrc "build:cuda --crosstool_top=@local_config_cuda//crosstool:toolchain" - write_to_bazelrc "build --spawn_strategy=standalone" - write_to_bazelrc "build --strategy=Genrule=standalone" - write_action_env_to_bazelrc "TF_NEED_CUDA" ${TF_NEED_CUDA} fi diff --git a/examples/image_ops.ipynb b/examples/image_ops.ipynb new file mode 100644 index 0000000000..51e1a93908 --- /dev/null +++ b/examples/image_ops.ipynb @@ -0,0 +1,601 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "image_ops.ipynb", + "version": "0.3.2", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": 
"GWEKvPCCxJke", + "colab_type": "text" + }, + "source": [ + "##### Copyright 2019 The TensorFlow Authors." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "l-m8KQ-nxK5l", + "colab_type": "code", + "colab": {} + }, + "source": [ + "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "O8FuVCLYxi_l", + "colab_type": "text" + }, + "source": [ + "# TensorFlow Addons Image: Operations\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + " Run in Google Colab\n", + " \n", + " View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "2a5ksOt-xsOl", + "colab_type": "text" + }, + "source": [ + "# Overview\n", + "This notebook will demonstrate how to use the some image operations in TensorFlow Addons.\n", + "\n", + "Here is the list of image operations we'll be covering in this example:\n", + "\n", + "- tfa.image.mean_filter2d\n", + "\n", + "- tfa.image.rotate\n", + "\n", + "- tfa.image.transform\n", + "\n", + "- tfa.image.random_hsv_in_yiq\n", + "\n", + "- tfa.image.adjust_hsv_in_yiq\n", + "\n", + "- tfa.image.dense_image_warp\n", + "\n", + "- tfa.image.euclidean_dist_transform" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DMbjxr4PyMPF", + "colab_type": "text" + }, + "source": [ + "# Setup" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "2ZdFry6yAp-c", + "colab_type": "code", + "outputId": "0be2c0d6-ea06-48bc-97ef-06ccffabbf0a", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 85 + } + }, + "source": [ + "!pip install -q tensorflow-gpu==2.0.0rc0\n", + "!pip install -q tensorflow-addons~=0.5\n", + "\n", + "from __future__ import absolute_import, division, print_function, unicode_literals\n", + "\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "import tensorflow_addons as tfa\n", + "\n", + "import matplotlib.pyplot as plt" + ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "\u001b[K |████████████████████████████████| 348.9MB 69kB/s \n", + "\u001b[K |████████████████████████████████| 3.1MB 30.6MB/s \n", + "\u001b[K |████████████████████████████████| 501kB 37.4MB/s \n", + "\u001b[K |████████████████████████████████| 552kB 2.7MB/s \n", + "\u001b[?25h" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Q6Z2rsP8yp2v", + "colab_type": "text" + }, + "source": [ + "# Prepare and Inspect Images" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "9gbgJP10z9WO", + "colab_type": "text" + }, + "source": [ + "## Download the images" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "IgUsVhBQ6dSg", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!wget -q https://i.dailymail.co.uk/i/pix/2015/09/01/18/2BE1E88B00000578-3218613-image-m-5_1441127035222.jpg -O google.jpg\n", + "!wget -q https://i.stack.imgur.com/nm2HM.png -O xray.png" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "uheQOL-y0Fj3", + "colab_type": "text" + }, + "source": [ + "## Inspect the images" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MFGirRRZ0Y9k", + "colab_type": "text" + }, + "source": [ + "### Google Icon" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "NRlvNQdm1YI8", + "colab_type": "code", + "outputId": "3997165d-46b0-4ac6-adaf-22bfd94efeb8", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 281 + } + }, + "source": [ + "google_img_raw = tf.io.read_file(\"google.jpg\")\n", + "google_img = tf.io.decode_image(google_img_raw)\n", + "\n", + "plt.title(\"Google Icon with shape {}\".format(google_img.shape))\n", + "_ = plt.imshow(google_img)" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAUsAAAEICAYAAADWe9ZcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXl8JUd1sP2cquq+92oZzYxnvO8Y\ngzEYMAYTVhOWsIRAAklIwCwBHBKyvAm8QBISSELy8X1vli8bJCyJMQQDIRhI2BIgDhjCvpjF2GBs\nM2N7POPZJV317a467x9VV7qSpRnNWBpJVj3z61Hf7rrddau7T586deocUVUymUwmc2jMSlcgk8lk\n1gJZWGYymcwiyMIyk8lkFkEWlplMJrMIsrDMZDKZRZCFZSaTySyCLCxXCSLyehF510rXYykRkUeL\nyPWH2H+miKiIuLt5nktEZPvdOcZSISItEfmuiJy00nVZjYjIBSLy+ZWux9GQheUcROQ5IvJFEZkQ\nkZ1p/VdFRFa6bgshIjeLyBNWuh5zUdXPqup9+p9Xaz2XmMuAz6jq7YMbRaQUkesGhXp6mYzPWVRE\nnpX2t0TkL0XkNhHZKyJvEpFisRURkaH0nTtFZL+IfGZg32+JyA9F5EA6/l/O99ISkcemOr3hCM77\nLhG5PR37BhF5SX+fql4L7BORpy/2eKuFLCwHEJFXAH8F/B/gROAE4GXAI4FyBauWWTu8DHjnPNv/\nN7BrcEN6mYz0F+AngXHg46nIa4CLgPsD5wIXAq89grq8BdgMnJf+/tbAvg8DF6rqhnT8BwK/Mfjl\nJJj/CvjiEZwT4P8BzkzH/ingDSLykIH9/wz88hEec8XJwjIhImPAHwG/qqrvV9WDGvm6qj5XVat+\nORG5QkR2icgtIvJaETFpn0mfb0la6RXpuP1zPD/t2y0iv38oTUtEHi4inxeRfSLyTRG55Ah+y0uT\nFnMwdQkvTNvPE5Gr0zG/IyI/NfCdy0Xk70TkI+l7XxSRey1w/HekFwsickrSPF6ePt9LRPaktpju\nHovIO4HTgX9LGtSrBg75XBH5UdKAfu8Qv+up6fccFJFbReSVc/a/IrX77SLyooHtTxORrydNZ5uI\nvH5gX98UcFnSsG4fPG76Ha8RkRvTdXufiGxeoH6nA2czR7iIyFnA84hC5FC8AHi/qk6kz08H/lpV\n96jqLuCvgV86zDH657wvUVBdpqq7VNWr6lf7+1X1RlXd1y8OBOCcOYd5BfAfwPcWc86BY3+n/7wA\nmpbBe+lq4PEi0jqS4644qpqXOOXzyUADuMOUuwL4EDAKnAncALw47fsl4AfEB2YE+ADwzrTvfkSt\n4VFELfXPgBp4Qtr/euBdaf0UYDfwVOIL7Ynp89YF6nTzwHF+FrgVeCjxITgHOAMoUt1+N53/x4GD\nwH3S9y5P53gY4Ihv//cscL5fAv4trf8icCPw3oF9H0rrlwDb56tn+nwm8UF6K9AhajcVcN4C570d\neHRa30TUjPrnaYgvuyK12ySwaWD/A1JbXgDcATxzTh2uBIZTuV0D7fmbwBeAU4EW8A/AlQvU72nA\nd+bZ/u/AT89tjzllhtP1uGRg21eAnxv4/NxU17FF3M/PB74F/CVwZ1p/1pwyvwgcSMfcBTxwYN8Z\nxHt7JN0bbzjC5+lN6Roo8DVgZM7+A8AFK/3cH9FvWukKrJaF+ObfMWfb54F9QBd4DGCBHnC/gTK/\nDFyd1j9F1Ez7++5DFIgO+IPBhwwYSseaT1i+miRkB8p/AnjBAnW/eeA4nwB+c54yjwZ2AGZg25XA\n69P65cDbBvY9FfjeAue7F7CXKHz+PrXB9rTvHcBvp/VZwoGFheWpA9u+BDxngfP+KJ1rw5ztl6Rr\n5Aa27QQevsBx/n/gL+fU4b4D+/8/4O1p/Trg8QP7Tupf03mO+1zgC3O2/TTwsfnaY065S4GbABnY\n9gbgc8BWolnoi6muJy3ifv7dVPb1xJfjY4kv67u8iIB7A38MnDiw7UPAzw/cG0ckLNP3LFE5eC1Q\nzNl3K/CYpXp+j8WSu+Ez7Aa2DBq5VfURqrox7TPAFqLmcsvA924haoIAJ8+zzxFtnycD2waOPZmO\nOx9nAD+busv7RGQf8aZbzAjraURNby4nA9tUNSxQd4jCtM8kUau4C6p6IzABPIgohP8duE1E7kN8\nKP97EfUcZFHnBZ5FFOK3iMh/i8iPDezbrarNfMcRkYtF5L+S6WQ/0a64Zc6xtw2s30JsL4jX4qqB\n63Ad4InXdC57iT0O0nmHiYL3N+YpO5cXAFdokiSJPwG+DnyD+OL+IFFQ37GI43VT2Teoak9V/xv4\nL+BJcwuq6veB7xC1QdLgy6iqvncR51kQjV3/a4ha+a/M2T1KVETWDFlYzvA/xC7gMw5R5k7iDXjG\nwLbTiW9JgNvm2dcQb+7biTcNACLSAY5b4DzbiJrlxoFlWFXfuIjfsY3Z9qE+twGn9e2r89T9SPlv\n4NlAqaq3ps8vIHaPv7HAd+5WiCtV/bKqPgM4nig43rfIr76bOKBxmqqOEbXhud4Npw2sn05sL4jt\n+ZQ516KdfvNcrgXOGnjh3puouX5WRHYQzTInicgOETmz/yUROY2odV4x5/d2VfXXVPUUVT2b+HL9\n6pwX3kJcO8+2Q7W/Y+a+eTxwUarnDuDngf8lIh9axHkPd2xE5BSitrugW9lqJAvLhEZj9x8CbxKR\nZ4vIaDLuP4hoT0JVPfEB/ZO0/wzgt4G+f+SVwG+JyFkiMgL8KdGW1wDvB54uIo8QkZLYPVrIHeld\nqexPiIgVkXYaLDl1gfKDvA14pYg8RCLnpHp+kahtvUpEijRg9HTgPUfWUtP8N/BrQN8d5er0+ZrU\nTvNxB9Gee8RIdL15roiMqWpNtHktRmhA1GL2qOqUiDyMaKuby+9LdLU5H3gR0Neq/p54vc9I9dgq\nIvO+UFV1O9Eu/LC06dtEIfygtLyE2AYPYrYmeynw+aSxD/7mU0Tk5HQdHw78PvC6gf2Xi8jlC/zm\nzxDNFr8jIk5EHgk8jmimQUReIiLHp/X7Ab9DNCORznPuQL0/TLQrvyiVv0RE5hW8InK8RPe7kXTv\n/gTwCwPHhtj7+LTODAKtDVbaDrDaFqLd6UtEwbKLKGQuI2pQEDWnd6V924i2SJP2mfR5W9r/LtIg\nQ9r/QuINvJt4Q97KzIDF60k2y/T5YqJA2pOO9RHg9AXqfDOzbYEvI761x4kP7IPT9vPTMfcD3wV+\neuA7lzNgl+IQ9rW0/z5ETeUF6fMYUYt+9ULHIGrtPyJ2v17JjL1w0NZ4NfCSec5XEl1q9hIF5ZeB\nRy1UV2bbcZ9N7FofJJoM/pYZ+3C/DpcRtckdwKsGjmOIL8Tr0/dvBP70EO3ycuDNC+ybt02Jo80v\nnmf7Y9LvmEznf+6c/Z8CXnqIupxP7DFNzHO9/4kouCfS
Of4P0F7gOHPvjUuBzy1Qdmu6x/al6/St\nuXUk3ss/tdLP+pEukiqfOcYkzXMfcG9VvWml67NeSd3hm4gDEM2hSy/qeC2infHxOscxfSlJvZNv\nEkeU6+U6zwLnfhvwL6r6iaP47gXAP6jqjx228CojC8tjSDKcf4rY/f5zovZ4oeaLsGIstbDM3HNZ\nFpuliDxZRK4XkR+IyGuW4xxrlGcQu3q3EY3/z8mCMpNZGyy5ZikilujM+kRgO9G29Auq+t0lPVEm\nk8kcQ5ZDs3wY8ANV/aGq9oijrYdyx8lkMplVz90KjbUApzDbLWI70Ta3IFu2bNEzzzxzGaqSyWQy\nh+arX/3qnaq69XDllkNYLgoRuYzorsHpp5/OV77ylZWqSiaTWceIyC2HL7U83fBbmT0b4lTmmSWi\nqm9R1YtU9aKtWw8r1DOZTGZFWQ5h+WXg3mkWSwk8hzgDIJPJZNYsS94NV9VGRH6NOK3KAv+oqt9Z\n6vNkMpnMsWRZbJaq+lHgo8tx7Ewmk1kJciCNTCaTWQQrNhqeyRwxg/MnVm36uMw9lSwsM2uHBQSk\nLrwrk1kysrDMrBnyJPrMSpJtlplMJrMIsmaZWVMEQJKOKch093tuQBiR3DHPLC1ZWGbWFH1Bafpi\nck7fPAvJzHKRhWVmzaDobCE5rU1KFJJZTmaWkSwsMwuymFFmJXaNDSBzvjAjygDCzGcdMJUPyD4B\n0CYdVMEHCAE0AIrxPp6k8dA00OtBncqXDoyCK6EowFlQEw8qBkTiX9NfBIwZFL+J/q/pVyqkVTNY\nXebmSpu1f/YPz9xDyMIyMy93EYIAMlsONDQIHpvyenkpMAoNgUJAgoEe6S4ziEIQqI1Smgb8BNQT\ncHACed/7uO6zn2N8535oPKUVNrYdvqkorFAUFuMFCbEGFsHXDQawYgjaAxGCJBnrHKYoQYSqqXGF\n58BEl1odE72GpmzjhkZ5+BN/Ah79aDjzDGi1odOmMQUWQfD4BoxzoIpgpxtAjZl+OTSpPWwApKGR\nGg+06Cz/hcocM1ZFDp6LLrpIc4i21cd8mmVfiFoq0ALE0BCDAIjWID0IJTQeLQr2h4q2LWmrwkSA\nqXGm3v9Wvv6pz6A77mCTWIYMdDpC1Z1kyBlUFSuOgOJVwQhqhKZWClNgTNTi1IfpzHsuKYNBIKgS\nxKCAcRZrLZN1hfGKRXAqeK+0i5IKGHfg65pu5alcm4deeinmSU+GjZtAOjDUImiFsWb613vMLHOp\nxSAhRE1UFA9YimW8OpmlQkS+qqoXHbZcFpaZBZnTnZw9gSbpnQoq/U5pjaFBtBO3hUnsvl3w+c/x\n5cvfwvjtP6IlgdNbYxhnMcZR9xRnCgjgez1arsGKQ8Ti64CxJcYYQoBGekCYFpZew/SAjvqZdWMM\ndVPhigI1gsVCbajrmqJwTNVTtFoFKtDzPQoLplFKdQgFk67ggPbYW1fs61Wce8njOPMPXo+6DnU5\nggWs1vGHC0n17rdHMd0meaxpbZCFZebuM4/tLQqcAk9MhGgx0YZoTbQjBgvNdvjs1Vzzt//Iaft6\njJQGMR4JPmqC0sYFRRRUAt4EKhsFjAtR7AYxBBNADSGdv/QDJgFi+T4y9zaWMGfDzGedI8S8mGjf\nTKYCIz2EGoOnkQ5BHPu6XXY3yhmPeAyn/for4bjjoT0c28bENvKABo8Te5d2y6xeFisss80yszCD\ngzWqCIIzlqZX48o4gKICB/1BNiD03v5OvvDuKzm59AxbuE8AU3iMBhShsgaPoqbGBShCwGjUUlsB\nvEAjLp06YELAqscQEAUvBQvNo+gLwLsIzVlEodgfYDIahWNLwZuGnqsINhDU4HwHgqMdGlQDnaLF\nyS1H92tf5paXPY/rdmzjya98LfzkM2B0hEqGsYDDglQ0BFy2Wd6jyJplZkHmxq1QH6DxiCugexC2\nb+NLv/c7bLp9GxuKgHYMU/snGKJN0W4xRY1pFYTGY4MDDEbBiI+C0QZUAoIiGiiCwfgWKlFwztYc\nw100wtla5lxN0swqZ2loDAQMYXAwXgOCB7UYLUFt1HiNBwLie2ANjUsqZBVoly0mpyqGh0p63R5d\nCm5oah70ouez9bmXgh0G28rz49YIuRueOTxpBCf+CbM2992+ZwZ0gKaGiXHYsYPPv+TZnFS02WgL\n6lAjxmAaUA+htGDB9ypMYfD4KLCCxJFzsQTpu9ukrq+mwRPpxX3CdNcYSF322dVfjLAMEo9tpyVk\nLCeEpL0aGlMAPnXdm+k6gaGRImrDUuPQNCpe0CBIMAgeJ1OUoaFuDF+nwwNe/FKOf/bPQ7sNVlDj\nZpT05I50F1ejWdchS9ljSRaWmcOTnss4mt0gKBoC3lgaoKUS3WV8gN4k9cc/zKf+9A+5eKRNbeLA\nS380Os+cYXrgqWka9hvH5IUP50GveRVs3Ao2jYxrAK2Se5KLQlnje8ETcOpBLFlgHjsWKyzzFVnH\nNKbGU+NoAIenQHyJ1dhlFpmCZh/Xv+4VXPOYh9J765u5aHQje90oIkII0XWnLyTWO03T4H0cxDqx\n49jyjav5zk8/neoD74buXtAaxeBNB1DEN+Ab1DRUBJwaVIpprTOzushXZR1jEWJHEqBBfAWFIj7Q\nnqjh2ut598MuZuNnPs75Y45u6BJKYbSuprVJEcF7v8K/ZHXQf2kURcHUgXGGx2tObgf2/cNf8tGH\nXcCd//h3SHUA2yQfTGtQB6C0NZoBckuuXnI3fB0TmAIspilAoLLQ2nEj17zoUk7Rig1NIGgLpSCI\nx9DDakAAnx0pDokKhGSblGR71cIy3nSZDDUPfM8nYWuHhjZOHV7Bptk/lhaS9ZhjRu6GZw5LjzZ1\nrTSMg99H64Mf4Iafv5R72QCTXbq2RWMbkAqjlqDDeDr4bJ5cBIFgPGoCpS8oa0OrF9hohRNs4JPP\neCxTb/o73NRuVGos0BMHdLKgXKXkq7KOaQMtaXD77+Q/f+yh7HrTG9hUdLEHlbbbjNGA8yXOO7zt\nUhcH6bka1eGVrvqqR9TQagraNXg7QWh3qbWL7wnKGOdvarP7qg/w6Uc+Efnal0D2Ybs1LvfDVy25\nL7We2fcjvvyq11B+70Yu2HAcIVQ0pYXQ4Bofg/TQAzGU3uC8AWkwUhM0z3s+FP0AJD1rUAmY2lNK\ngSpUdUVRFwhwwaZhvv+qV3LTyBBPetvbmGqfSEEHu9I/IHMXsma51kmuJz0aeoRpx8gGqAhUxHXP\nBFBFdyEf4MDtfPZpT+HUbds4pTSIRP9BWxsKYzFFtLWF5CAO8WYx6gj5UT4sgdhuRsEGh5GSgMQp\nnUawCLVTalNzQuN5xJ4u//7859KevBlliv4VrHwKbReiD2jTv8aDy+zVzDKRNcs1Tj+IRYkBPI0E\nnDhcCDg8UCfn7tR1PrCDf3vio3nCaWdy7tAGpJpCRWLQByWFWxs8QXLunmdb5vAMOs4PtmFjosO8\nNzDeapBezQP
2CXc861e4KVQ8/L0fgNETcDYgGvDGYTHxgZ3HZtyfQJBZPvJdv8YRZgRcnKCn6PRj\nKWAaqHpQHeT7f/EGvv3kp/K4DcexZ/ftcWRbZNqxPHPs8AKFh3aTuusu0DHK1knPfUrHtT/5dMbf\n+TZsiPPlLU0UhnpXxTLHGj42ZM1yjaMAEuc4WwGP4DFYAx6DCw6KA3z4qY/l4UD7+A57phpcU6C+\nJmAoioKmaVb4l6w/Sg9Oo5apKIyW7Kt7TOyfYsw2VFe+k5Gzz8I98hGoawGpa09Ir0Mz8D85gfoy\nkzXLNY6Q5hMnQWlwuAZCHfB4mJzgP579s5w/vpv28BATUw0FUJSCMQ5jDN77PF1xBejZuASBtnF4\nXzPhPG4I2kUbJHDz636Hb/3v30R6kwhNmpYKda9ibmqLzPKSheVaJ1r/qYixJV0NuIBlnNbOO/jY\nJQ/l/tUEG80m3J6KdmOQNJ0xP2wrhyFQWxgvYcrFXECtxqRueZpj3qsZKwxbr/s2H3rsxTCxG3oe\nNNAuC+axJmeWkdwNX+sIKI6WQiXQKwKje3fzgSdcwoPHHA8d6tApHLtGDbUYTB2wGiNo5Mds5XAh\n4AKoGAIGLylCElDUlmCEUCjqa/xkjwvaI1z/zJ8inH0O5/3N26FwIB6TPROOGVmzXAMoKQo3NRV1\nVCajPxBKhdDQEBMijvb2cs1PPobHnDDEqArGFUxOTtIKFprDBcfNHCtUDCozYeNEQ3p9BXpOaUyI\nydnU0W6NMGwKNqmw6cYb4ZqPQG8c0YKQQul5wjzR4TNLSRaWa4D+iLcgtIAegZ6LO4QCgqMXJuh0\nd/Gpp/0852/eij/Qxbc79Gx8gJw32PwsrRrCwNJHtJ9OOAq+GB0eahsIpomlJXD9H76RH/3Zn8P4\nTqyGGGIvQO6SLy9ZWK4JAioVeAcUOAIFNZOmiY56CkO9Pbz/4ofwAL+fem+X9tBxTFa9la545ihw\nabZUPz+RSiCYuA6w0Rn4/Gf5wq/9JoQDaQDckP0ZlpcsLNcAybU8hisPYNUREIZQeraGiTv5wiN/\nisdsGWGytwtxFi+w2ZYUWdlYc5hZTv/RxuxC/Bvn69ecoiXbd2yHJmBDwBto8uO8rOTWXQMYDIZW\nzKjYz0+DAw/l/p1c84yncc7xw7SkoD00ii8C1BU9rXPHbI2iacDHKvhuhZ1qaFPQTHWR2vBfheHZ\n//I+aHWmn+K2z4/zcpJHw9cAouA89JyC1BiKmEhr3w52PvNneUiry97uFLbYQGgafFkRDJjg7jp9\nMbPqCUlQqoAJMGTb9GplSgrqDcdz8tv+liecfDaeDkoc62uHADli/bKSW3etYKHBYhCkHoeDu/j4\nJU9GWz0mTUHhWkz6Hu3kkmKCofB9f8rMWsJoXPrUCm7TGAc7Le48aStsPpUpjVYZp3HKZGXAS7Za\nLieHfZJE5B9FZKeIfHtg22YR+U8R+X76uyltFxH5axH5gYhcKyIXLmfl75HoPB8lDvAMeRMdhyb2\n88XHPYUfO3kjU02Nrwx1q2BKKirX0G4MNriUNXElfkTmcMzNVNnHELXKmA0Tghik02Z/8GxrlTzk\nz/8GWmMglqqZiHFSXBScNs/vX1YWo3ZcDjx5zrbXAJ9S1XsDn0qfAZ4C3DstlwFvXppqrgNSgITZ\nYbcaagIEg6gFhfLgQT79lKdx/hZH1dvHkB1BnFBUNRukxKmlkTg7x5uGsI5978zAstro50yPYe9i\nLY0aTDBM+IZuNUXbCsPDw0wZ4UZjueSf3svkxhNAoE1Jyw1DGV3LHAYkxxhdTg57H6nqZ4A9czY/\nA3hHWn8H8MyB7Vdo5AvARhE5aakqe09GJeaxbkxyLvaxS1VgUvwtBxM3cvVjL+H+J25iV10z1NqM\nFNnsDFFTm6utzefLuFroR0QPgDfxutc2LmMFdEaGmQhCMJabixEe/6//Cp02Q1qtaL3XM0f7pJ2g\nqren9R3ACWn9FGDbQLntadvtzEFELiNqn5x++ulHWY17Dv0JiDaplT0rlN6Bwn4XGBvfy3ee/AzO\nGjpAe8LgeyU1SjPssfXK1v1YsVDXdTFlVtvMJRtcFJJJw5wxmRg8cSCv3ThG/v7tPHLryWAKeoCJ\nfhCZFeBu91A0BkI84ltRVd+iqhep6kVbt269u9VY89jUFRMEjxBwaepOw1izn0/96gvZ2umw0W1k\nQgQ7ZKmdp5haT0b9gKpH1RNCM72u6uNcz/4y+A0BNasvotLcEW/b87TUQS/gC0fZtPjOGSfBllPA\ntKgIFBhcyHPBV4qjFZZ39LvX6e/OtP1W4LSBcqembZnDEUAweAw1jrYSnyIdZ+ef/w1n/PAGSm0x\nVWxAtaDnGnquR7GOElxpCKAKqnHWSlpHddo2Kaqo78+YTtqaX40d8RR+sh+dvtfgG6UYGsVJhy8d\nv4VH/f07wDp6BEocso6u9WrkaIXlh4EXpPUXAB8a2P78NCr+cGD/QHc9cygsMa0DAxdl6iB84+vs\n+fi/c0JniJ4JBIEiOR87D7VZP50yS8xd03et6X+2yPS8aoNgRQghzKtprhaMzsTpNQobNmykV7SY\nGCq5dngjT3zz26AcpkEpIApKA96sE5vLKuSwT5qIXAlcAmwRke3A64A3Au8TkRcDtwA/l4p/FHgq\n8ANgEnjRMtT5HskkgSEJ0CilNewTZeP4JJ/75V/n7BFlSkaYairGGkOwMby2x+HFxOC/90DmBiQu\nXAFBp9NgzEqFobG8iIARmhADGmvaFsLqaqO+wO9HPJ+oayY7HW7rjvO4D3wQQkDDJNaMIjVgAj2p\nCIAlj3qvBIcVlqr6Cwvsevw8ZRV4+d2t1HrEEANihGoPDI2ysRe46Reezf1GCxozDKFHx1piaAxJ\nWujqHOv1Js5StsERkp6sEusa+mkRjBAaj1GDVYM1LergkbanbuBAVbOz8pRbjue0+53PqY95FDzw\nYhgZiSpZWRAboQAxoAGalKfB96BXQV3DDTfQ+8bX2P7lz3Pj925gtN1iy/AIwyKEbpeWcTFQRa/G\nmQIVaKyg6hH1FNN6vqE2ZsYHUg89aBSPE3A+tgOkeJVJSHY7liFVuhNd6IwwaQvO/vO/5cx73Q/v\nLJaBDBFFPH9JZ+kvVmbRrJ8+3CrHEWIw12ILKpPc9GevZ4MYxI7gXYVdQwGECt+hZz1V0aPdpJFf\nYhSdIgC+jRihlEBddSk3DPEj7bJjcoL7nvRQTrv0FzjlgffjvLENYIfpNYZGChzd1H8VmukoO4Kb\nzjUUZ7uEEmQ4mTOOPwN96OM4+1dew9m9BroHoTpIdeUVXPufH2Po4H62tDq40SEmJibQoIgYhqRF\nxzsmnBBSvctkM1SJQv9QVqz+FFWIrkFBZlLkGAVXVRzEoGJpuYI7ho7jtHPOwUgPtBVfAJlVhayG\nrH4XXXSRfuUrX1npaqwonglst4UvHPaHX+fGX38hm6qSxgsTZZeRNaRV
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "clXQrFVa2nN7", + "colab_type": "text" + }, + "source": [ + "### X-Ray Image" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "tbaIkUCS2eNv", + "colab_type": "code", + "outputId": "e6b11596-71f5-4a45-aea2-657c4ecdfef7", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 253 + } + }, + "source": [ + "xray_img_raw = tf.io.read_file(\"xray.png\")\n", + "xray_img = tf.io.decode_image(xray_img_raw)\n", + "\n", + "plt.title(\"X-Ray image with shape {}\".format(xray_img.shape))\n", + "_ = plt.imshow(xray_img)" + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAADsCAYAAAB66G16AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAH6ZJREFUeJzt3Xv0JGV95/H3R0BUQLnpiMNw09GI\nJzqYCWIkK9GNAqtBE5fAqowGd7xxFvZosmA2BjfqahIvIAYDSsT1AniFNUQWR7xEFJgRRC4io0KY\ncS4iMIAKBvjuH/U0U7+evndX1+3zOud3ft1VXV1PPV31qaeeqq5WRGBmZs31iLILYGZmxXLQm5k1\nnIPezKzhHPRmZg3noDczazgHvZlZwznoW0rS2yR9tOxyzIKkfSTdK2m7Aa8JSU+Zcj77pffZfpr3\nmRVJ35Z0UNnlqCpJV0p6RtnlqAIHfUEk7SzpFkmvzA3bRdK/SXpFn2kOk/RQCq17JN0k6bVFlC8i\n3h0RryvivectIv4tInaOiAcBJH1dUiOWrR9JLwXuiYir0/MVktZIulvSOkl/m98hpXUq//egpA+l\ncZ0dWH78X01QpuPS+7wuN2xHSR+RtEnSHZL+r6TFufH7SbpY0p2SNko6Y9QdqaRj0jayRdJmSedK\nemzuJX8P/K9xl6OJHPQFiYh7gdcDH5T0+DT4b4HVEfG5AZP+LCJ2Bh4L/HfgbElPK7a0VkNvAP5P\n7vljgJOAPYHnAC8E3toZmXaEO6d164nAr4HPdr3nrrnX/c04hZG0G/A24PquUScCzwWeCTwJuBP4\nUG78PwCbgb2AZcDzgTeNONtvA8+LiMcBBwDbA+/Mjb8I+ANJTxxnWZrIQV+giLgE+GfgdEmHAUcz\n4kocmYuBO8g2EgAknSbpttRyWyPp99PwJ0r6laQ9cq99tqSfS9qh+/0lnSrpk+lxp0X32vTed0p6\ng6TflXStpLsknZGb9smSvibpF5Jul/QpSbt2zffqdFTyWUnnS3pnbvxLJF2T3vdySc+kB0nvyLU6\nd5D0S0l/l54/WtJ9knbPd6lIehfw+8AZqWV6Ru4t/6Okm9N8PyxJfeZ7sKTVqY43SXp/10temY7M\nbpf0l13TfSe9/4bUOn1kbnxI+m+SfpKm/TtJj8iN/zNJN6b6v0TSvn3K90jgBcA3OsMi4syI+FZE\n/CYi1gOfAp7Xa3rgT8jC9Vt9xk/ifwOnA7d3Dd8fuCQiNkXEfcD5wDO6xl8QEfdFxEbgK13j+4qI\n2yIiP78Hgafkxt8HrAFePO7CNE5E+K/AP2A3YAPZBvDaIa89DFiXHj8C+CPgIeCg3GteBexB1np5\nC7AReFQadzHwxtxrPwB8qM+8TgU+mR7vBwTwEeBRwIuA+4AvAU8AFpMFw/PT658C/CGwI/B44JvA\nB9O4RwK3krXkdgD+GPgN8M40/qD0Xs8BtgNWALcAO/Yo4wuAH6THvwf8GLgiN+77XeXfPj3/OvC6\nrvcK4MvArsA+wM+Bw/vUzXeAV6fHOwOHdM3nbODRwLOA+4Gnp/G/AxySPpv9gBuBk7rKcBmweyrD\njzrlBI4C1gJPT9P/T+DyPuV7BvDLIevSl4D39Bn3NeDU3PPOcq0H1gH/BOw5xjp+MLCabJ1dUPfA\ncrKW95PIjjo+3VlX0vjXA59I4xYD1wEvH2PehwJbUvl/Cbyoa/zpwPvLzoGy/0ovQBv+gK8CvwIe\nN+R1h5EF+10pQB7MB0Wfae4EnpUe/ynw7fR4O7KdwMF9pjuVbYN+cW78L4A/zT3/fL+yAC8Drk6P\n/0MKDOXG/ytbg/5M4G+6pr+JtBPpGv5osh3OHsDJZF0D68jC9x3A6V3lHxb0h+aeXwCc3Gd5vpne\nf8+u4Z357J0bdiVwTJ/3OQn4YlcZDs89fxOwKj3+F+D43LhHpHVm3x7v+zxg44B14s9SPW0T1sC+\nab3aPzdsZ7JA3h5YBHyOrBU+yrq9HVnId3aGC+oeeBxwXlr2B4Crgd1z459O1up+IL3m4/l1Z4xt\nbHFap5/aNfxdwDnjvl/T/tx1UzBJryILiK8C780N71wpcq+ke3OT/CwidiXroz+drOWaf7+3psP7\nLZLuItuQ9kyjLwQOlLQ/WYt7S0RcOUZxN+Ue/7rH851TGRZJOk/Sekl3A5/MleFJwPpIW1lyW+7x\nvsBbUvfGXWkZlqTpFoiIX5OFyPPJdiDfAC4nC7rnk+u6GNHG3ONfdZanh+OBpwI/lHSVpJeM8j6S\nnirpy+mk4t3Au9laLx35uriVrcu9L3Bark7uAEQWYN3uBHbpVXBJLyPrRjkiFnZrdLwa+NeI+Gln\nQETcGxGrI+KBiNgEnAC8SFLPeXR5E3BtRHy3z/gPkx357QHsBHyBbKdG6rb6Shq2E1ld7UZuOxlV\nZN1VXyHbqeTtQtZwajUHfYEkPYGs++S/kh2iHt3pU4+tV4p0TpAtEBH3A/8D+O208ZKm/Quyvv7d\n0g5hC1kgEFmf5AVk3TuvZuHJull6N1nr67cj4rFpfp3+7g3A4q7+7yW5x7cB74qIXXN/j4mIz/SZ\n1zfIdnYHAVel5y8m6y74Zp9pprola0TcHBHHknVbvRf4nKSdRpj0TOCHwNJUL29ja7105OtiH+Bn\n6fFtwOu76uXREXF5j/msBaTc1StkAw4n61Z6aUT8oE8ZjwPOHbIcnfobJR9eCLw87dw2
knWxvS93\nbmQZ8PGIuCOt0x8CDpa0J1u7sM6IiPsj4hdk3UZHjjDfXrYHntw17OnA9yd8v8Zw0BfrDOBLEXFZ\nRGwgC+mzJe04ysQR8RvgfcDb06BdyA5xfw5sL+ntZC3/vE8AryHr3y8q6HcB7gW2pLD589y475B1\nDZyQTo4eRRbKHWcDb5D0HGV2kvSfBrQev0EWTjek+vg68DrgpxHx8z7TbCK7CmMikl4l6fER0elG\ng6xLbZhdgLuBeyX9FvDGHq/5c0m7SVpCdh7j/DT8I8ApStd9S3qcpP/cayapHr5KdlTTKfMLyE7A\n/km/ozhJv0d2hPDZruHPkfQ0SY9QdjL/dODrEbEljT9V0tf7LPNryMJ0WfpbTdbt1TlJfRVwXFqe\nHciOAH4WEbenI46fAm9M68quZOdsrs2V7RZJr+mzPK+UtE96vC9ZN82q3PhHkZ03ubRP2VvDQV+Q\n1Ao/lFwIRsRHyVpwb+83XQ/nAPsou276ErLD0x+RHfbfx8KuACLi22Sh9L2IuHWaZRjgHcCzyY4m\n/pns0Lsz/9+QnYA9niwkX0V2EvT+NH412RHOGWRdEGvJwqKfy8n66jut9xvIlrtfax7gNOAV6eqV\n08dbNAAOB65PXWqnkfXB/3qE6d4K/BfgHrId2vk9XnMhWZ/0NWR19zGAiPgi2dHDeanb5zrgiAHz\n+keyo7aOvyLrxrs41yX4L13TrAC+EBH3dA0/gGy9uifN937g2Nz4JWQnVLcREXdFxMbOH9mJ97s7\nOwmyOrkPuJmsgXIk8PLcW/wxWX3/nGxd+Heyy4o7VxftAfTrFjoQuFzSL1P5biJbtzpeSrbD+lmv\nidtEC7tSrQkkfQ34dNqxlE7SFcBHIuKfyi5LmSQFWbfO2hm937eBEyJ9aaookq4BXpi6VuZG0qHA\nm1M32iTTX0F2gvu62Zasfhz0DSPpd8kOVZf0aLnNqwzPJ2td3Q68kqxb4oDUfdVasw56s1FV4p4d\nNhuSziW71PHEskI+eRrZSeGdgJ8Ar2h7yJuVqbAWfboC4DSy62w/GhHvKWRGZmY2UCFBr+wugj8i\nu5Z7HdmZ92Mj4oaZz8zMzAYq6qqbg4G1EfGTdBXGeWRf8TYzszkrqo9+MQsv+1tHdm+Th0laCaxM\nT3+noHKYmTXZ7RHx+GEvKu1kbEScBZwFD1+NYGZm4xnpuzJFdd2sZ+FXvfdOw8zMbM6KCvqrgKWS\n9k/fbjuG7EcAzMxszgrpuomIBySdQPaV/e3IbhPa/cszZmY2B5X4Zqz76M3MJrImIpYPe5FvamZm\n1nAOejOzhnPQm5k1nIPezKzhHPRmZg3n2xRbI83rarKFP41rVk0OequVKlwOnDdJebxzsHlz0Fst\nVC3gp9G9LA5+K5qD3iqpScE+TL9l9Q7AZsVBb5XTppAfxC1/mxUHvVVCgT9pOdb8ZxWmRSyPg98m\n5aC30s0qFKcJvlmHZq/3m3X459/PoW+DOOitNLMIvjoFXL6sRYV+nerD5sdBb3PnSxIHL880OwG3\n8q0XB73N1bgh1sawmlXL361863DQ29yMGloOpq1m0dfvwDcHvc3FKOHkIBrNpC1+B357OeitUA74\nYnXqzoFvg/julVYaSaWHTUQ04gtak9RlE5bbRuMWvRViUIiUHe6wbfkiAkl9v5RU9eXpGLeF79Z9\nOzjorXEmvXdMr+k6O4BRpps0LIv4xmuvndawMjjsm8tBbzNVdst30Pwn7aoYt3UM0y/rLN5rkta9\nw76ZHPRWuKLDo4p9zeOE5ihHDNPe3sFh324+GWszM4/ArfPJ007ZJyn/tMs8TnjXtX6tP7fobSZ6\nhcMsWob9QqcOYZRvHU/zjeDOtNP25U9aFqu/qYJe0i3APcCDwAMRsVzS7sD5wH7ALcDREXHndMW0\nKisq5Nui+2TuqP3zRXaz+GqcZplF180fRMSyiFienp8MrIqIpcCq9Nwaqlcrc5qrT5rY2hz1BHGv\n5c8Hbr/bIUxytDDuffqt3orooz8KODc9Phd4WQHzsAqYVQj0C6umh8wsl6/Iumr659AG0wZ9AP9P\n0hpJK9OwRRGxIT3eCCyach5WQbPorhkU8E0Jl1nuDGF4V84486vCN5NtPqY9GXtoRKyX9ATgUkk/\nzI+MiJDUc81LO4aVvcZZtc0q5G0yw06qFtF378su622qFn1ErE//NwNfBA4GNknaCyD939xn2rMi\nYnmub99qyiE/vmm+vDWs337c93d/ffNNHPSSdpK0S+cx8CLgOuAiYEV62QrgwmkLadXhH6guz6T3\noR+Fu3GabZqum0XAF9PKsT3w6Yj4iqSrgAskHQ/cChw9fTGtCmbRonOrcDYGXYI5jxOz3inUi6qw\n4fXrx7dqmVVrvgrrXFP167+f9TdjHfSVsWaU7m/fAsFGMm1wONzno7vFPcm3YUf5bP151ouD3oaa\nVevQ4TAfvcJ+0tslWDM46M1sIt5x14eD3sYy6mF9nW9GZhl34TSHg94GmuaSPn/Jphocxuagt77G\n7Zuf9Cf8rHiTnhD39fXN4KC3mXBXTXXkr7SZ14/BWLU56G0k07TqHATzM8p9cKZ53378GVebg956\n8oZbX/7srJuD3rbh+9nUT6cvvciQ93pQX/7NWJuKW49m1ecWvVlDlL3TLXv+1p+D3gby4Xq1zaPL\nptf8+nHYV5OD3hYYZ0P1Rl0N03wOvuFcO7iP3h7mk7D1M0lID/shdn/uzeMWvZktMO396H2EUD0O\negO8cbaZb3PQfA56MwPG77LxzqE+HPTWk7/y3k7jtu77vdbrR7U46G1s3ojN6sVBb2NfbeND9vrq\ntNi7f1O2F+/Qm8NBbxNx2NefQ749HPQ2MYe9gfvp68BB33LeGNtjnN/7HffHS7zTrzZ/M9YW8Abb\nDr2+CeudfnMNbdFLOkfSZknX5YbtLulSSTen/7ul4ZJ0uqS1kq6V9OwiC2/l8T1S6qnX5+bPsvlG\n6br5OHB417CTgVURsRRYlZ4DHAEsTX8rgTNnU0ybh1lcP23VNO4dLh38zTI06CPim8AdXYOPAs5N\nj88FXpYb/onIfBfYVdJesyqsVYvDvh5mcRvjSaf3DqMaJj0ZuygiNqTHG4FF6fFi4Lbc69alYVZx\nDu3m8T1srGPqk7EREZLG3m1LWknWvWMlcWurueb5YyT5eYLXqyqatEW/qdMlk/5vTsPXA0tyr9s7\nDdtGRJwVEcsjYvmEZTCzHspsxTvkq2nSoL8IWJEerwAuzA0/Ll19cwiwJdfFYxXiDbKZym5V99rJ\neF0r39CuG0mfAQ4D9pS0Dvhr4D3ABZKOB24Fjk4vvxg4ElgL/Ap4bQFlNrMBHKzWTVVYKSbp47fp\nzPJnA6uwDtl8+uVH/XbtJNPZRNaM0v3tWyDY1LwRV4N3uNaPg95mwmFvVl0OegNm0xr0ddtm1eSg\nt5ly90G7+fOvJge9PcwbqVkzOehtgWnD3l0
3zeXPtr4c9LYNh7314iO++nLQW0/eqM2aw0Fv1oOv\nIBqfGwfV5Z8StEKUcffESTnQrencorfCVD1A3Wq3tnCL3gqVD9KqtPBHDfeqlLcqvFOsLwe9baOp\nG3RTl8tsGAe9LVBkGA5r3Y8z74hYcB7AIW7Wn4O+hapwG9lpfyCju7z59xnnVrr9XutuG2sSn4xt\nmaoF2LQ7mEmmH1QHEVG5OjKbllv0LVeFLo9Zhf2wgHaAT64K64lNzkHfIv1+VarT3113nT57B3q1\nNGHdqjt33bRUdyu4KeE47rXxTVlus0Ec9C3RHWhNb/mOGvZFtzab8KWsupffHPStMEqgN3FjHneZ\nZr3ja2KdWj056K3ROmFbduiWPX9rN5+MbbBxWqj9XtuUgOq3fPM8EV2nG711NOXzbzsHvQ00KJia\nFgJ1DGKzUbjrxibWhFAs4xvB85jnLL6b0LQdeZsNDXpJ50jaLOm63LBTJa2XdE36OzI37hRJayXd\nJOnFRRXcBmtCCM/KoMAq6+qjtoRoW5az6kZp0X8cOLzH8A9ExLL0dzGApAOBY4BnpGn+QdJ2syqs\njWaeweUdSn/D6qboVvMk7++WfDMNDfqI+CZwx4jvdxRwXkTcHxE/BdYCB09RPhtTGcHbpLCfV8h1\njiSaVHdWXdP00Z8g6drUtbNbGrYYuC33mnVpmBXMoTE7RYf9PD6nce/mOc7rrH4mDfozgScDy4AN\nwPvGfQNJKyWtlrR6wjJYUoWAr0IZBqlKiBVZT51ul0m7bKy5Jgr6iNgUEQ9GxEPA2WztnlkPLMm9\ndO80rNd7nBURyyNi+SRlsExVArZpQVG15RkU3qME+6Dxs1jWKvzGgfU3UdBL2iv39OVA54qci4Bj\nJO0oaX9gKXDldEW0fsoO+Ulbj2WYpK6qtFzD7qE/ibp8dja9oV+YkvQZ4DBgT0nrgL8GDpO0DAjg\nFuD1ABFxvaQLgBuAB4A3R8SDxRTdyuJwKM+kX+oqu1Fg5VIVVgBJ5ReiRub9mTUh2GdxY7dZHBVM\n+9l131663/N5c9dNadaM0v3tb8baQG3ZWEdZzip0deQDveyydFShsWiD+V43NeONajL5UKx7HeZv\nxNbrdwbyqrIzsHK5RV8j7rKZjV6t4bqHfz9FL1fT73raFG7R2zbaspHW/W6VZZV9lFs7WLW4RW/b\nqHP4TaOtyz0O11E9Oeitp2HXbTdlgy/6pGavPvNp5jftl6Mm4fvy1J+7bmws3thnY5TLIoedWO3X\n9TSLX83y59wsDvoamXef8rB5jVqWOvTZdup2lJAc93MY9J7T3pqg39U3o5Zr1urwWbeRu25qpoob\nUpWu6Z7GNKFZtnz9l/VZNGEdaCq36G1io3Ql1HHjr2OZYfRy13FHZtNx0NdQFVqeVShDEca5d3vd\nlr3oWyRbdTnobSzj9BtbNRS9Q/LnXX0O+hqbR6t63pfyVV1VW/FVLZdVg0/GNkBRYdu2EB/FJHVS\n9HX6VejCs2pzi74himjdN+HEahHGuU9OEV9eqgqvD/XhFn3DeONrriqFvNWLW/RmFVbFcHdjon7c\noreZG3aP9HHep+w+6Hnot4xNX26bH7foG2jW13gPasH1m8+swn7Q9E1oWeaXqw7B3oQ6byO36K22\nqh6MRfwGrdkkHPQ2MQfV5OpYd27N15eDvqGK2Cg74VSlfvOqlKPpHPL15qC3keRDvmqqWKZJzmuY\nFcVBbwPV5eZdVSvjqCepzebBQW99+XB9ck2quyYtS1v58sqWy2/EvX4JqU4t0Fn8hN4k8xxHXY6Q\nrFmGtuglLZF0maQbJF0v6cQ0fHdJl0q6Of3fLQ2XpNMlrZV0raRnF70QttU0XzKqc8hbMdyab4ZR\num4eAN4SEQcChwBvlnQgcDKwKiKWAqvSc4AjgKXpbyVw5sxLbT1N+ruhTfpWZtXLXfXyWTMNDfqI\n2BAR30uP7wFuBBYDRwHnppedC7wsPT4K+ERkvgvsKmmvmZfcZip/JOAwKobr1coy1slYSfsBBwFX\nAIsiYkMatRFYlB4vBm7LTbYuDbOCNeVHuqflQJ0Nr0vNMfLJWEk7A58HToqIu7tO4oWksbYuSSvJ\nunZsxpr6e65mNpmRWvSSdiAL+U9FxBfS4E2dLpn0f3Mavh5Ykpt87zRsgYg4KyKWR8TySQtvg7lF\nVg117A7zutMso1x1I+BjwI0R8f7cqIuAFenxCuDC3PDj0tU3hwBbcl08NmfeYM1Mw1oakg4FvgX8\nAHgoDX4bWT/9BcA+wK3A0RFxR9oxnAEcDvwKeG1ErB4yj3o1d2qkbi3JWZnXDq5f/XZ/P6Fu3ECo\njTWj9IoMDfp5cNAXqwqf8byVGfS95l2nz8AhXysjBb1vgdBwdQqYWarSclepLMM45JvJQd9wbb3k\nch7LPEqA1ynkrbkc9GZmDeegt8Zp4xHMLLjemst3r7TGcFBNznXXbG7RW2PU8YtJVeCQbz4HvTWC\nw8qsPwd9SzQ5CKu6bD66sKpw0LdIVQNxGvllqsLy1e2GclWoMyueg95qq1dIObhG57pqD1910yJ1\naWUO44AyG49b9C3hkDdrLwe91YovoZwN7zDbxUHfAk0MxiYuk1lRHPRWW1Vu3Vf5ZnJVLZcVx0Fv\ntVXlMO2oevmsHRz0VltVbc2bVY2DvuEchmbmoG+4pncdeEdmNpyD3qxg3ecSmr7ztepx0JvNST7w\nHfY2T74FgtVeRPQMzk63TlVDtbtc7oayojjoW0BSq0Ok346g6Hn2MmiHZFYUB73ZHOWPMhzwNi/u\no7dGqPK3ZHupU1mt/oYGvaQlki6TdIOk6yWdmIafKmm9pGvS35G5aU6RtFbSTZJeXOQCmA1T1T76\nMrgu2mmUrpsHgLdExPck7QKskXRpGveBiPj7/IslHQgcAzwDeBLwVUlPjYgHZ1lws25VCrG6/dKU\nNdvQFn1EbIiI76XH9wA3AosHTHIUcF5E3B8RPwXWAgfPorA2uSqFYJE6XTgO2G21ZR2wbY3VRy9p\nP+Ag4Io06ARJ10o6R9Juadhi4LbcZOsYvGOwOWn6hu5wN+tt5KCXtDPweeCkiLgbOBN4MrAM2AC8\nb5wZS1opabWk1eNMZ9NpetiXzTsbq6KRgl7SDmQh/6mI+AJARGyKiAcj4iHgbLZ2z6wHluQm3zsN\nWyAizoqI5RGxfJoFsPG1KezLXtay55/nnVB7jXLVjYCPATdGxPtzw/fKvezlwHXp8UXAMZJ2lLQ/\nsBS4cnZFNquXKoW9tdMoV908D3g18ANJ16RhbwOOlbQMCOAW4PUAEXG9pAuAG8iu2Hmzr7ipnnz4\nNLWl54A1y6gKG7mk8gvRclVYD2at6KAf9TYHVapb7/waZ80o3d++BYIBzWvhlxloTag/axbfAsG2\nUYffYjWz0blFb33VtZXvnZTZQm7R20jqFJ512imZzYNb9DayQScZq3wCsih1vNVwGffmt/I56G1i\ng2
7c1YbgH7RMbVh+qw933djURmkhzrMVOa95uWVsdeEWvc2NW7lm5XCL3sys4dyiN5uDeRzN1PHk\nsM2HW/TWKPPuN59kfvkfCM//zYt3Bu3joLfS9Aq4Yc+HvV+d+RvJVpSqdN3cDvwy/bfMnrSkPsYI\n+0rWybjhPOz1Y7zfNvXhHUU115EC7TvKiypx90oASav9IyRbuT625TpZyPWxLddJb+66MTNrOAe9\nmVnDVSnozyq7ABXj+tiW62Qh18e2XCc9VKaP3szMilGlFr2ZmRWg9KCXdLikmyStlXRy2eWZF0nn\nSNos6brcsN0lXSrp5vR/tzRckk5PdXStpGeXV/JiSFoi6TJJN0i6XtKJaXib6+RRkq6U9P1UJ+9I\nw/eXdEVa9vMlPTIN3zE9X5vG71dm+YsiaTtJV0v6cnre6voYRalBL2k74MPAEcCBwLGSDiyzTHP0\nceDwrmEnA6siYimwKj2HrH6Wpr+VwJlzKuM8PQC8JSIOBA4B3pzWhTbXyf3ACyLiWcAy4HBJhwDv\nBT4QEU8B7gSOT68/HrgzDf9Ael0TnQjcmHve9voYLiJK+wOeC1ySe34KcEqZZZrz8u8HXJd7fhOw\nV3q8F3BTevyPwLG9XtfUP+BC4A9dJw8v32OA7wHPIftC0PZp+MPbEHAJ8Nz0ePv0OpVd9hnXw95k\nO/wXAF8G1Ob6GPWv7K6bxcBtuefr0rC2WhQRG9LjjcCi9LhV9ZQOsQ8CrqDldZK6Ka4BNgOXAj8G\n7oqIB9JL8sv9cJ2k8VuAPeZb4sJ9EPgL4KH0fA/aXR8jKTvorY/ImiGtuyRK0s7A54GTIuLu/Lg2\n1klEPBgRy8hasgcDv1VykUoj6SXA5ohYU3ZZ6qbsoF8PLMk93zsNa6tNkvYCSP83p+GtqCdJO5CF\n/Kci4gtpcKvrpCMi7gIuI+ua2FVS5z5V+eV+uE7S+McBv5hzUYv0POCPJN0CnEfWfXMa7a2PkZUd\n9FcBS9NZ80cCxwAXlVymMl0ErEiPV5D1U3eGH5euNDkE2JLrzmgEZXfj+hhwY0S8PzeqzXXyeEm7\npsePJjtncSNZ4L8ivay7Tjp19Qrga+koqBEi4pSI2Dsi9iPLiq9FxCtpaX2MpeyTBMCRwI/I+h7/\nsuzyzHG5PwNsAP6drF/xeLL+w1XAzcBXgd3Ta0V2ddKPgR8Ay8sufwH1cShZt8y1wDXp78iW18kz\ngatTnVwHvD0NPwC4ElgLfBbYMQ1/VHq+No0/oOxlKLBuDgO+7PoY7c/fjDUza7iyu27MzKxgDnoz\ns4Zz0JuZNZyD3sys4Rz0ZmYN56A3M2s4B72ZWcM56M3MGu7/A73kGdm7fpW0AAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "UwqfpOm--vV2", + "colab_type": "text" + }, + "source": [ + "# Play with tfa.image" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jIa5HnomPds3", + "colab_type": "text" + }, + "source": [ + "## Mean filtering\n", + "Mean filtering is a filtering technique, which is often used to remove noise from an image or signal. The idea is to run through the image pixel by pixel and replacing it with the average values of neighboring pixels." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "SutWnbRoHl6i", + "colab_type": "code", + "outputId": "4755c425-b3f6-4309-e53e-e46e72acdaac", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "mean = tfa.image.mean_filter2d(google_img, filter_shape=5)\n", + "_ = plt.imshow(mean)" + ], + "execution_count": 6, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvVmMXFl65/f7zr2xZSTJTCaTyb1Y\nZLFYS1d1rd3V3epRr4PWjAGNAUkzI8AjDwT0yxiwAT9Y8IthwA/yiweYlwEEa2BpYIws2J6RZkbu\nRb2qu7q6a+uq4lIssrgmdzLJ3GK993x+OOcukSSrWKzIjXl+RDIzbtyIOBFx7/9+59uOqCqBQCAQ\n+GjMag8gEAgE1gNBLAOBQOA+CGIZCAQC90EQy0AgELgPglgGAoHAfRDEMhAIBO6DZRFLEfmWiJwQ\nkVMi8kfL8RqBQCCwksiw8yxFJAI+AL4JTAOvA/9UVY8N9YUCgUBgBVkOy/JzwClVPa2qPeAvgN9e\nhtcJBAKBFSNehufcDVwo3Z4GPv9RD9i2bZvu379/GYYSCAQCH82bb755Q1UnP26/5RDL+0JEvg18\nG2Dfvn288cYbqzWUQCCwgRGRc/ez33JMwy8Ce0u39/htA6jqn6jqS6r60uTkx4p6IBAIrCrLIZav\nA4dE5FERqQL/BPjrZXidQCAQWDGGPg1X1URE/hvgu0AE/BtVPTrs1wkEAoGVZFl8lqr6N8DfLMdz\nBwKBwGqwagGeQOATcbd0YFnxUQQ2MKHcMbDuUe6upYHAMAmWZWBdEMQwsNoEsQysD8KUO7DKBLEM\nrBP0TutSBzV06f0iQWEDwyOIZWD94VUxSGFgJQliGVg3CE4nc5G8w5Rc0eEENhhBLAN3paxD99Ig\nXXIj30+W6pjeZb/Ss2b7q5a2aulhWrImFVTB+t9Zi0ER95QiIOKn4LLkDUixbzYqKY9Gi/20uK1L\nHi/3fHdyh2sgCPjDQxDLwB1o6bfg9Ciz6sonfzllJ5Mm9cKm3scoIoUQ5b1TTfFIVbdZFTRF+z2S\nVov+4iL9uXnodpBuF+l2Ielh+z00tWBt/jtXKCOIMRBFmChGogiJY2ylhtbqSLNJVK9THWkSNZtI\nvQ5RBUSWvLey/erGrZiSSCoqJTEXQf27l/KVIgjlQ0UQy8BdWSqYg3dmMjlofxa3bK6sNtMxVSds\naUJ/dob+7C36s7dIbt/Czs1ibt5AFhexrRa21aK/2CJdnIdeF+l1kF4P6ffRfoJa6yxLq6AWUQUp\nBFMlgjhGohjiGFutYGtVqDeIag3iRpOouYloZBS2jGFHR5FNo1S2bCHeMkZt5y6i0U1QqYIxqBgQ\n6z8L9z7LPbMzI1QANCXFOusWgwmpzA8NQSwDd0VKYpBblQPoHSKqOCvP6ZaAWuh3sQvz9GZm6N68\nSTpzg/TKNDp7Gzs3i529TTo3h5m5ibRaaL+PUcUAsZjcZFU/EvePXKEEvDB5i9YqlhS1FpU+KoJZ\ndHauWrdPokIfwUQxjDbR0RFMcxTdNEayZZz2jl2wdRwzvpXK2Faq2yapbp+ESgUw+dQ9v4yoIv59\n3zkPDzwsBLEM3MFdxdHPx7UsUvnkO9umYBNI+iTz8/RuXKd79jT22lW4cZ3+rZvorZukl6+gC4tI\nPyVSqIqgAkKKQb0WuedWQK0XSMm2lDybuV65abLmWqX5bae2gmjhElAsYvvY+VvY+RmwkFhBTQU7\nNgbjmzHj4/TGt9KemCTav5/K5BS17TuojG1F6g3UxCBgsiuLCGhmSwbFfNgIYhm4K4P5iyX/XOZq\nzM27LOCS0r9+g97VS/SvXSK5egmuXYPT50ivXyWdm8H2OhigKjGi4kRF3POnXgRVTD6CPLDi/YNl\ni7IcpJH8eQp/YRYIkmziXAoEaS72zhqM/OMlAqSPzl3Hzl7FnrH0TIQ0N1HZt5902xS97TuR7TuI\npqZoHjxEZcs4WquixmQey8ItEKbgDxVBLAP35M4k8CyMAaqKpIq2F+nNXKdzaZre++8jly6h1y+R\nXLuM3pql0ukRixCLYKVaCqSoExUUwYK4IIr7cYKGlKb1AqgBKQtQKXhUbCr9XYpqi6KZoJZ+jL9f\nAWuyxHfFYIgxiFWYnyM58i59E6GjmzHbJpGpKfqPPU68ey/VPfuo7NhNtGkzxJF3R2QOg2BhPiwE\nsdzIZAajZB7Bkux4cy3PoFF1wmEt6fwci+8fITl3FnvhHOmlC/SuXYQb16klSkWdOCbGksYWi2DU\nCUdmeVmBvg/KiLr7jbcMVTKb1eRxm8yGvNN3uuQN5RQR7nz6reU0oPL+ziY0tiyk5EJrsWgEEYpZ\nmIOFOfTsh7R/9RraHCWenCTevQezZx/VZ5+ntnc/1a3boOqi/uo/y9ynCfm2zGJe+j0EkV17BLHc\nqJSjuYq35bR0lwzu0O/Rv3md3vlz9M6covPer7EXp9Hr14k
6LeqZzxLBCi61hmwi6p9Ri4mp8zCW\nxU+ctQrgLTMn5kuCKUu4e/DJy42W7E4/Xy8LlpT2z6bQ7nHF52ABlaWjFcBQs4rMz5PO3SY5dwbd\nMkb//Hl6jxyk8uhB6vsfpTo1hYw0QUzp01VUM0ksJSSV80xzv21grRDEcoOi2Umb3y5yKq044TSA\npCnd6Wl6Z0+TnD5FcvY0/XNnsJemqaQpUensFheBQUuJ3qJmiV8xu8c/Krf6vLBJWT7LI7sfPmo/\noUh6LyeXl0JGUr5Y+P3Ev7PcF+ovLqVxxiJImmJv3qTz2qukJ96nt3cf3f2PEj96kNEXXqYysQ0q\nVUSyy5IiGJ/DWfhny+IfpHJtEcRyA5MJpjtDS6JhBWzf5UKeP0vnl6+Snj5FcuECeusWUb9PtXiS\ngoFqmOwvuedJP7BdyhJx78fc/XnuY2+50/68M/lJBn4Vfw7Kl2Rz+nw/f0EQaNgEvXGV/sw1eqdP\n0N+5C3vtMtVDh6kdeIzqtu1QqZUi+8XYBGFQzgNriSCWG5XMREJBs0iuQtLH3p6ld/4s7Q+Ok3x4\nguTXb2BmZ6iohTwIE07oMqqDFnE9tejtW3RnZ2jdvk7v1DG6B5+kfvhpagcOUZnagRopgl1kVmUW\nzQqf7lojiOUGxrnvxPno+gn9mevM//Kn9I4fwZ76AL1yFdPtUUWAyE0bsRhs6fQOLEWBVAQVIRJD\ndH0Ge+0W3bfeoTsxTnzgAI2/9w3qh5+kOrUDKnXcRcii4qfn4bNdcwSx3NAI2AR7e4bumdO0jx2h\n8+rfkV6+gGkvEiMYccnclsidyGoQsWGyuIRy70zF1Y6rd29EqSFCiJI+6dXL9G5dJ2kt0Dv3IY2n\nn6V24HHirZOoiV0GAISPdg0SxHKjoqCtRTrT5+gefYfkvV+TfnACuXLFWZISkec4qkVEsODjJJ/M\np7jRcIEtyaudVBKfmuRFtZfQP/Ie3UtXSM+ep/vMs9SfeZbGwSeQuJplx6/umwjcQRDL9Y6SJ/0M\nBlYyyybb4tNxEFCLXZxn4c1f0H731yRHjiIXzxO15xFjnPVoAdE8Wm183Xfx/BBO6HsheX28isWK\n9fEgQdQQqSGyir1yDXt7ns6li6TTF5C/36f6yKNEo2NAfGcKV/HsA7d1yW7hW1kegliuc3yIBnLB\nzMMERWccX37nAjgp/atXaJ04wvz3/jP21Cmi23MueCOx90aW8/18qaEKkWapPSG883FkAqb+UpPF\n+F0RpM+ONyD9Nnr2DHbmNnPap/rC84wc/gy1yT1IXPE5nkuzScNnvxoEsXwIKMmjtx8NolkjCYso\n2H5C9/IlOu++TffNX9I7eYzo+nVq5XNQIc7Sxss56QOZ0uFEvS/yLCQpkuOzj84lILjySrEuv2D+\nJsl3/5b262/RPXyYxkufp/7MC9T27UeNGZg1fHQVU2C5CGL5kJAJZj71FhflFlW036N19Aitt9+g\n//abcP4scWvBNZAI4rfiZKIZWffbipKKpZom2GvX6S8ssHDzJv1rNzHf+odUdkyhUeyj5IOJ+/mk\nPBiey04Qy3VPeSKOL6OTfB6Ytjt0z5xk7vv/geToUaLrN4iSlCibaouUGvKEs2wliRSMQiqQipKa\n1Lk7Ogvoqffpzc6x0Kgz8tJLVB/ZD7URQEo5nVmeZmjZsRIEsXwIKKRSML6vI5rSn5mhfeI47V/9\nDP3Va9Tm5jFE2KzSRrPSPT9NVA2CuUIorlAqC+IYdbdT44JBlbRPenma9g++Q3L7GiOvfIn6oSeJ\nmptcyWT2PZVrSQPLShDLh4C8WE59DXPaoXv1Mq133qb1i1fhxHEatxcRDKm45rQGc0cJYBDKlcJ9\n7qmAGt9xyVuZLgjk7o9RkvNn6LbnSW/fIp2bY+TJZ4i3bSdbmM21rAvf20oQxHKNk8tZ1gnHp/OU\nvJR+P1dXQ3eR1qs/YuHnP8UeO0p04wYxkGSddrEYX94YzrHVIeu+5NwmBhWKxC7/hWffbMVaoqtX\n6d74MbPvn6D1/EuM/5e/Q33XbjSuUSxxUU4fC1/schDEcj2Qn0CWrB9Q1udR8lUGlWRxns7J4yx8\n7/+DD94nnp8j9nXcqTckjYKKdV4uDYK5mmTfofpMotz7LKXZtbrshopV7OXL9PuvMj+5DV5+hdoj\nB6E+UupBB+ELXT6CWK518t6KWdOLwuoweEtEU9L523SPHaH1058gx44St1sgxq+uKMQ2e8Kij06Y\nda8Scvf0n+ybsf63qFtmQ32HohhFb1yj85Mfoq02zS+l1A8cwjSa+XO42vLwxS4HQSzXAX7RA3wv\ncWdfiu9AbpV05iadI2+x+OO/xR45Qq3VA4n9kqyl1RYDa5ZSCqZf6kLyGvPsTsVF0Dl3nsVugq3X\nYXSU5r4DA2sXhW96eQhiudbxc7Fi1cLBFQp7N6/Refst2j/8Lrz/DtHCLNBAs2UauLNrY2DtIqUu\nwC6f0paW1vBpQzZFuj201wVNccdCkeEQvvDlIYjlWkfBFchp3hhNvb9ROou033yN1o9+hBw/QrQw\nj0aGxFgiW16wILDeyPyXRr01qaDiluyw27ZRf/4FRp97kdrkjoFUsOBaWT6CWK5xxHv7DWDFoliM\ntXSvXqX11ht0/t2fE129RGQVpYaqYk0KBpdzma/1ElgvqI+9IbhORSpYiehEEcnkdkZ/53cZ/dwr\nVHbuAxOj+J6kQSmXlSCWa4klJWt5srn3O7qNKf1rV2i/8TqLP/4B8dXLGJtiJXKBAMUHc3z3oCxS\nGs6jNcPHVSZmEXLIrMmIronpTU4SP/8SzZe/RGXnDlSM92+Su2vCF718fOwq8CLyb0TkmogcKW3b\nKiLfF5GT/ve43y4i8q9E5JSIvCsiLyzn4B8mVIupl8u/K3Ios63GQnL9Bq233qD9o+9hThwlUouK\nyVdUFFGf5CxLc84Da4bsux380vMcS1/gb9SJX1eE3rZtxM+/yNhXvkFt5x4wlbzEMYR0VoaPFUvg\n/wC+tWTbHwE/UNVDwA/8bYDfAg75n28D/3o4w3zY0bv/5IIpoEo6c532O2+x8OMfwqkTVNsLfldf\nAZLV8mRJe6W68YeZpZ/cuqC8XKSUpt64i152oewDycQk8XMvsuUrX2fkiacQ31RDxLiGKeKawEmQ\nzWXlY8VSVX8KzCzZ/NvAn/m//wz4R6Xtf66O14AxEdk5rME+rBSZlDb/Ed9sVwGrgtqE9pE3af34\ne/DBMUxrEZEIxCDiIt9G3emiIpTMjodiZnaPy8ldxXFdCKcWFVhWih8VC7gIt0Ww26aovPAiW772\ndZqHn8Q0R5wvUwzGf/eu8lFcuWrwWy4bD+qznFLVy/7vK8CU/3s3cKG037TfdpkliMi3cdYn+/bt\ne8BhPGy4uZgqpAhiXa5d2l2kfexdFv/i32LOfEAlTRGtohqBSb3t+bCfJFr6/+4MfAI+8VsZNOLW\nAqJO4Ky4oF2W3i
W+okpw3386uYOR3/19mi+/QmXHLjRyF8fgmlwd7mca/pGo6gNdxFX1T1T1JVV9\naXJy8tMOY32T13q7pgiuzttVD0uvTe/D47R+9mPk3DRx6uvAjYJJfVOMjcTd7cs7tMO7IbJ/a83O\nLI9IVImsElkQNfSISCaniF54mebnvkhl1x40qiBEZMdIYOV50PPsaja99r+v+e0Xgb2l/fb4bYGP\nQEq/Vd3JICqQduldOE37tZ9h3/wVcS9BqWBNTGoUa3zC8trSgaGhqvlPJn6u/ZzNO8HnP6qua0/+\neehA0GRN4r94o4KxghLRlYhk2xTm+c+x6avfoLpzN5gYEeP8kkrIn10lHlQs/xr4A//3HwB/Vdr+\nz3xU/BVgtjRdD9yDso0kpa29q5dZePNXdH/5GubKJR/nKUe6nUX6sNboZH643C8H3pKWvEOPa20m\neZ18ZneJLw4tosVr6zPKxp+XokpE10T0t08Rv/Aym7/6DRqHn3JLSigY9T5tsUEqV4mP9VmKyL8D\nvgJsE5Fp4H8C/hj4SxH5Q+Ac8Ht+978B/gFwCmgB/3wZxvzwIWBVEbUYVRflTBLa771D57VfIBen\nfdK5m5yLVd8A1mD98hFrhSyK69L+fGFmngNY7JX9XyTNZz5GLaRNXZgrQUkAayKkUsXEMaYSY6IY\niXxUGNDUYm2CTRLSbgdN+mCVWC0xSmSK+unB35qPw71skbKVlxmWtix99J2fweD7LT938V6dNSw+\ncSERoTe5neiFl9j0m19j5PBTRCMjftlhHwzyQhnJxnK8rBU+VixV9Z/e466v32VfBf7Fpx3URsTN\nHC2qFnptFk8cZ/Gv/j3R6VNUbFr4MP3+WWWPO4nWmt3kgheILSL0/oTP7OdCUNxfPXBle7UasmkT\nbN5CdccuzKYt1JqjVEdG0WYTGWliqhWkUsFUYiSKMSYCC9ZabNLHJj1sexHtdpF2C7O4iC4u0L91\ni/7cLHb2NszdJlpYIE76GBEfWNNsol/yBbvxGRX/nnQgaHSvT94Z/oUIl0q+XdDOv3ejSoLxlTn/\nmNGXX6G+aw9Esd8/k0pBiEKwexUJFTxrAs2XRxWrdC+ep/X6zzEXzhOnFuvPTINQNlLW9nmTuQgU\nwboTXvx7VAEjpAqpCLbRgL27qGzbTmViEsa2omNOLOPNW4hHNxM3RzH1GlRiMMb3f/R5htlz+tdT\nFNUUSVPodrGLiyQLLbo3bxDP3sLenkFu34SbN+ifP0d/5gbMzUHSw4gSUcpV9Qn/SDHtL2zHe5eS\nOv+zkzlrSsEcgCwmKkKCIZ3YTvzCS2z63Oep7tgJkfdbl5QxJJ+vPkEs1wjZVLo/c4v2O2+Tvvka\n1U7Li8L66VHo7agBn6rDYtS1AukbIR1pYiYmiXfuwuzYhTy6n3hyitrkDqrjE0TNRr5utvo0oKKR\nSGafLp06W7+vtwvjKlQbRJvGiID6gYNgU7TbJp2bpXfjOu1z50ivX0OvXCS5coHk6jT21i1AiPz6\nl4pCluLjY03WUCzjcbfPwKcHuZqBkp9Rs3wHSDDYbVNEz7/Epq9+k+rOPX7Z2/XxXW80gliuAbIT\nLm236XzwPt3Xf4U5fyG3y9bTyVPYXFkE2/lV3erYgh1pEj3xBLU9e4h27yHeuZf6zn1OKCoVNJLi\nGbIpbP68ZuA1oCTHvuqlkGcZ2AcBIiCKoNIkajZp7NxF4/BTJHOz9C5dpHf5AunF8/SPvUdv+gLR\n7TniJMEAqbjeoDYfx/2l8OQXO9UiMCdCH8FObCd64WVGv/b3aTzxFGri+3rOwOoQxHKtkFh658/Q\neeOX6AcfYDp939B1vcW6JV9XRnyXpH5UId00RnXPPmqPHCR+9rPU9+2mOjWFNEcRUwUit0RGOQNR\nBn55b+KAPVkIIYJolO+cB2SWmn4i5E5eFaga4sntxBPbGDn8JMmtmyzuO0B8/jT27BmSs+fQGzeI\n+20MNh8DNquWuXdwrZzhEKnrIASGRJyPMn7+JUa/9g0aTzyFjIySdSB1M/T19a1vBIJYrhHaJ48x\n/53/SPKznxLN3hrwTWbdhNYHzheXIth6k/iJJ6gfepz40BPUDjxGdftOpNZw0+VSeZ7gKpfcbR/t\nFc3dE/eUDp9OpXfspKW/Sta5Ln0mL3/GIPWYeGeDzVM7kaRP//YM3XNn6X5wgv7PXyW5eB5ZnCXC\nuuXhtBj/XYaV/xaf7qVEdKOI3uQko7/zj9n0uS9Q3bkLNZU8jGOxXijDdHytEcRypcgrSiiULztx\n+x0W33uL3tF3MXOziBTJ1sXEcgVPnDuEWcmC75n3VLLa5mI5QsBVNae1Knb7dqqPPU7ji1+h/tjj\nVLZPuUi3DNiOpTBQOapf+CazvXKbssj2IR/UkkEPhl3KkjMYmc5epPh0/XMYA5U6lcmdVMe30Xjk\nUVpj43TfP0rvxHHSyxeJOotEaimqrnyQLn9HBtQFt5whaugaQ39yO+b5Fxh9+QvUdroSRsEFqCxp\nvoxEkMm1RxDLFaJs51isP0l99PvqZTrvvY1euUxFXLDUlPOB7rCGlp8s79EKmGwJrWxabF2itDWZ\n1esWcu2bGLt1K/HBx6g/9TS1p59h5KlnkWoDydcpLyL77nXyF2Tpn4PRfxn4PfiJFAGwpcKY3Z//\nf1chyjMfi2m2eGs0rhJP7mD0N79CfPBRov376b3zNsnJE3D9CpH3SWYt9dw02txh6fZE6E9OEj3/\nAlu++g1qu/ZANGg9SpZ5v8EKWNcLQSxXkOyUzXLIxSrp4jyt40exJ09R7fb8qWMGjKbVsjJU3BK6\n5CkzgE8FUlGXEI/zASYi2McOUXnmORrPPEf9sUNE27ahlZoTSv/YXISym3e1AO/kDjGUO++7q/tw\niQiXPcCyZEdBUfHPptltL8XNcRqHNlGd3EFnzx5ab++i/5MfoTdvEtnULzHsXAs+lOXWcReXHpVO\nTFJ5/kU2feXrNH2btXwKn/sozT1HF1h9gliuECrFomP5mt22T3f6DL133yaemSUCVweNDwzIKnmt\nxI0j67GoPopr/LTbmtRbUTFqDWljFPbuoflb/5DG089R27kLW6tijeuSY7Qcm4Zc3mTJ7bsP5RON\n++PuuGMXGbxRFuO8CYcqSgQmwmzdRvO5F4m372BeIvq/fov0wjniVgsRIcWApBh1ZQQpBjuxg/j5\nFxn92jdpHH4SMzIy+Gpyx6sH1iBBLFeIvLlDdkpYpXPhLHM/+VuS1/6OkTRx3c5XdZSObAlWAWoJ\nRNb4dmKu26ZzJRi03kQeOUjl5ZdpvPwyI4c/g5iK87b6ZQYjLSa5he9xfaCl8WZWJhpBdRO1Rw4T\n//4Oul/4Iq3Xf0Hy+mtw/ixxr4sVQyrG96PcTuP3fp/m575AZccuMKFz0HoliOWKkfmzAFXS1hzt\nE8fpnziOzM74GhezdlJGSkZPapREnGREGoGNSMYnkMNPMfLKF6k//zyVnbs
gquSPMX7qXZ5YroUL\nwSdhMLE+q7rx9wHRyGYaj3+GaHQT7S1jtF/9O9Ij72BS6CHYySkqz79E83Nf8AnnkW+eIaFscR0S\nxHLF8JEbC2hK7+I5kuNH0PPnqVCk7a2Nc6ioTLGiJMZiEWI1aGpIR0aJX/4CjS9+mcaTTxNNTOBW\nOLCgmUhmEpNnPOa+QP8S6wD3OTj/bGYXZxU9Pn5eqRHvPUi90URGx2jdXqR/cRrGxqg8/xKbvvZN\nqjt3o8aXMAIugT5MutcbQSyHStl2WnIqZFNsgXR+jtaJY/RPHCWenUcw+bTXPUsxWR1cnXG4p1ce\nvV0y3lzUpCgjrKgBKyRbtmCfOMyWb/0WjYOPY5pNH7Rxdd/lPj2ai8tgeGb9iISWBpsFYErfscG9\n5zimMrWL+MUq/avXSU+eIJqYYNNvfJnGE0/mQmk0S5GyCFGQy3VGEMuhs3SyWY7UuhOlc+ksyQfH\n4eJFquA70Lh9MsPLCohGGFKyhG0d4twt6x6u+DSlXDoLC9DVNUOkilqwYxPwmWcY/epXaDz5FKba\nKL1FKX5yf0NJWtb5+uWuSjx7j0ApL9SIa6snWycZ/c2vEj9xGNMcZWT/AUyzWcqVhdxkX68fxAYm\niOVQKadbF6EB/P+qAnYWZv8zMT9Ex26QplXoV1CNsGIR8d5LP521pby7YZ5fUh6vGLIUePUJ8a5T\nhBPPXq0B+w/R+NJXaX7+i9T27INq/a6h5eJ5l4rjYKR5XSDFe8j1v7jTfafip+QKEsHI/oOM7D9Q\n2OxS+D5dOWfRfzOwvghiOXSyYMBSRRAgwXYvEVfeRPafJhLon2xgL48g7QZZsoqoIbKCmoTEGFQj\nn34z3BCJ+HI9i89k8sMUcOuPA6mCffQxml/5Os2Xv0R1525sFPlcwnud9HdRw/UikCU+Os0o23Q3\nH6wsvXn37YF1RRDLoTIolOqn3lmJn+gi6eL72M5xKlvniOoV4hGl14DkA4P2I8A1orD4Po0WX/64\nHGMln2pbvzXLiUSUVEHHJ2j+5tdovvIbVHfsQ0XWWDAqEFgZglgOnbKEeL+gAli0dxUWjmG614EU\nM6pUHrVEdVjsGJLLdbRVJSLKp3eRTzVShq9Orveji/SiSqSuOw5i6KMkm7dgnn6WkZe/RHXnXpDI\nW53kAaBAYKMQxHKoLFWPkg9TO6SLZ5D5E0Ta9dZcAvWUaLdSf1bojCj98wadNYjN6ovLqdHD9nVp\nvj6OIVsd0bUQ6zWbyBNP0vzNr1Hb/Qhq4tx3Fw0EOgKBjUEQy2FSbsBT7qmmlrR9lvTWz7C33wCT\nIAJR6oIottKlfjiluqtDb7pN+1SNdLqBzDV9dx+/5O3dXuxTiVZ5OVn3XB1j6G6bIH7p84x987cY\nOfQURHG+32CSdhDMwMYhiOVQySzJkohYQDvYxQ/RxWOQXMu1RpBi9T5JMZu71A70kWaVzmif7tEY\nWagQq+tmkedfKj6yik8Cv0NJ72uoBvFBHcWKszJ7zRHM4SfZ/KW/x8jBw0i9UWQDZY1zA4ENSBDL\nZWCgakUV27tJungC2zpJRLewQH3LsmwJBiWBuqG6C0zNYNvz9M+PkM7WiWyEisvILCbj5lMVxLjJ\nd9FaLFVg/6OMfv5LjBx+GtMcBZ/mkgWpgjEZ2KgEsRwiRb0K+foxSJe0ex5tfQDdKz7lZkmCMkVH\ncKsGYqU62WP0mQUWRyA5Y0hx6Z6AAAAgAElEQVRuxpik6NajmvVCtA9UF5MtXphmlSlW0UaTkRdf\nofnMc8Rj4yCDtepBJwMbmSCWQ6Xsz3Mo86StU9A6jbGtgfvKuHpjJ1+KYqOEyi7LSF3oNS3dD6vo\n+VFMGucSWXTnzl77k+Gm3oJRg4oh2nuA5kufd91xxARxDARKBLEcIm5d7MF667R1HubeROaPE1s7\nWDGXPQ5ASmEWSd32GCrbZ6lsXaD+SJ3WG0p6bhRuV4nUiV0qLrn8k8bJBTA2RYySRBXMY08w8q3/\ngvrhJ1xjWiRkBwUCJYJYDpFSYWNWPIhtXYD2OSRd+PhHZ2VzWXmcukayxAnxZJfGs7N0R/qkp0fQ\nmzE2ibHERLlD8ROghZ/STkxS/exz1J59BsmXOpCQSxkIlAhiOUx00G8p9ND2h9A+j0jfBVLuKj5e\nZhUQ6yPOeVGxe0aTEO9uIfWEfrNP58Ma9loDacUP1ihSXIVRUm8SHXqK+nMvUNm56yNKGAOBjU0Q\nyyGS2ZTge/ikLWifQXtXQGy+ZMTSCmGXF+7EUbVoi5alF7mSSUVNSrSti9RTGLV0Tgn2gwp0K6VM\nSbLsIopHZ12+XSs1UVcdlIiBHXuoPv1Z6o8eQuI6g36CkEsZCGQEsRwyoq5zj2LR7k20fQHsIh+f\nC1kEapyYab613EBDJcVsTqk/KsT1iIVWjL2Soq0KRrOvM6/0zkXU+teOfEMOBdLmZiqHD9N46ini\nsa1+f/eqoeVDIDBIEMsh4iLaKS4VqEd/4Th28QMMCeDsvHtKUJ4vmfkLi2d1v8rpRgnSSKjtTzDN\nhM6FKt1TVdIrI9Cu5q8h6joHKeIrgFxvzNQYUitUn/ksjS9/mfrjB7FR5B9lMSo+ZSgIZiCQEcRy\nyGRrSEvSQrvn0WQGQ7ZMQ7EcwYPhBMw1wLBgOsSThkYjJWomtEdS+tMjyFwTo4LR1MudwfUzcknt\nloikWqP2+ONUHz2IxrV8XFlwSf0MPMhlIOAIYjlUBN8FkrR3E9udRrTtLcrS2tkPhC+lVIMSIQIW\nRUwHszmlVqkQNVJaI9A9VkUXTSatgMGC71/p05smtlF5/AnirVNA7EovsUWpTlhRKxAYIIjl0PEh\nlc5lpHMJIS1th08tlpnw+qUgrCiifUzdUtvpyyQ786QXKqSzMcZWgBQVg9UI1RSNYmqPPErt0ceQ\nuObEXDO/prNcs0BRIBBwBLEcIoVHsgPdS0j3irfsXKWMPKhO5qRF5Nx7I32nDvdT6VGZVJrPpnSa\nVXpn6tibFpNU855BFoMdG8ccOEhlYhtIFlDyjT3uEPWgmIEABLEcLgqQoL2raPskLE4D1qcCOfIA\nzn2TCZj6HkEJRW/LLPndNdRIJYGoR31fj+pkje6jKd0PU5LTwPUIFUMaV6h85hlqzz+PxBWXOG/U\nB4AGhVLWUFK6c/ne2dVp4FZwHSwPWrrK5x2zyo1VNkbuRBDLYeKXf0h7M9C7hiZz7jCSLOvxUz35\nkmcoT8uzTkfqrccUqfeo7RaiutKrQ/vXEel8BTs2QbR3D7XduykO8jx+PlCFtDYpLF7r37OrdMJ3\nlA8Mm8FLU/YJZzObjSCTjo8t1xCRvSLyIxE5JiJHReS/9du3isj3ReSk/z3ut4uI/CsROSUi74rI\nC8v9JtYOzvrT3gzavw52oSSUmdh98oOreNTS5yhbgl44/aqQSgqVNvHUArWn5qgenkcn28i+bcje\nvcRjE7iv34WfJFtUx/+WNbjITn
Y5yDIO0NT5YNUn86/2AB96tGRlKoPi+fBzP5ZlAvz3qvqWiGwC\n3hSR7wP/NfADVf1jEfkj4I+A/wH4LeCQ//k88K/97w2ABe2j/VvY/i3EdMkSxEXdAmSfrifkvR94\nxz3OuYmaBLNVaTyj2C0xsmc31X370Eo9F+A7n3WNqeQA7mKR1UkJSoL7lEOh5vJggGhAFAvX0Ebi\nY8VSVS8Dl/3f8yJyHNgN/DbwFb/bnwE/xonlbwN/rq627zURGRORnf55HmpUFJI2tncT279FTEIx\nVcn8jPYjn2Oo4/H/WUkw26E+vgnZuZPKjm0gXmDWsi6WKfvJABUBNViU96cXuHirj5iIj+yGfLdz\neyPt9yDPZVN2bq3y9J4GkVh3Z+bLXi/HzpD4RD5LEdkPPA/8EpgqCeAVYMr/vRu4UHrYtN82IJYi\n8m3g2wD79u37hMNeoyiknRvQmUbbN3yAxFfGrMZyDF5gjAp9U4M9TxLteILK6Libxoqus5zKYtqn\nCCoRCfD9I21+8G4biRobzdhZHsp5Y2mXzx9K2DVRYXzErzZKdiEu1aStl0PoU3DfYikio8D/A/x3\nqjpX7qCtqiryyRJjVPVPgD8BeOmllx6SQ1yxyS0kuYnYNoLxHdOzSPbKHlFFTbmi0RZMbR9RYzdW\nGoga5NMWFK0gSyd9+eJpCm2ts5DGxFpfpdGtf+44AbMP3FaZWUyZbSdsGamWSiuKJLN1cgh9au5L\nLEWkghPK/1NV/1+/+Wo2vRaRncA1v/0isLf08D1+2wZAwYul0Y6fqmTWUGkKs0KHV2bMWgSpbkOq\nOzG1cayfRUXr8ijPLj4gaonUBbXUB7ayAMRdLwJrZTq8WvvdY59yhliea6BeEEVY7KQstFJ04s6A\n4kbifqLhAvwpcFxV/7fSXX8N/IH/+w+Avypt/2c+Kv4KMLsR/JUOiyazaDqLEdc8Q0v/5/PiFUNy\nD2lU24mp70biJkgKmf9pXR3w4lUwM3tcgMdo0WwZybo+ceeP3OVnI+13j30o3c4+5mx/RJjvGBY6\n2X1S2v8hmRDeJ/djWX4J+K+A90Tk137b/wj8MfCXIvKHwDng9/x9fwP8A+AU0AL++VBHvJbRBE0W\n0WQRkzfxLfICV3w4+R9CVNmB1HYA1dK6PasyrCHgT9hS/upSy6i0W+A+WPpRFXajsNiLaHV9Spr3\nU0oulRvHwryfaPjPuPen8fW77K/Av/iU41qXaH8O+regNw+kgAvuqID1jXdXdkBufmW1jmnsIxrZ\nhVAvlq1Ypwd57i/XYingIthQvKf1+v7WBFqIYKerLHSUxBpiU8QFYSNJZUhNGyo2WUD7c2jSJqvE\nzgVSsknjyuG6ooNKHa1OuCl4KbF9Y02iAg9Kqkqrq3T7foMsvdRujCMpiOUQsekipPOQdu6YG2pJ\npFYWhagBlXEwtdLWAS9VIHBPFGGxZ2n30nxLxkbqER3EcoiobYO2wPYz77hvUMEQOg59ckQUEYtU\nRqAyBlTIXfnZwAKBuyAipR9Du2/pdPuUI+HOI79xwjxBLIeIaBexXe/8dpdc17E832NlB5TpYtxA\nKltAKoP3BQL3gwi9xNLpu4q0O+VxYxxMQSyHiCTzfn1wJY+dies3mfW1XGljLiVGK1sxlXFUfcBp\n3aUMBVYPl2vZ6kGrd2dmx0Y6ioJYDhGxbbAdBIv4FlZKuXnayl+BLRWIRzFRPRdsN9iNdJgHPi29\nBHp9A25FqdUezqoQxHKY2B5oj/Ia4a5gZ7UOLkGJ0GgEMVXvaip8lRvzkA88CP3E0E+XHDEb7IIb\nmv8OEVWLaoKRlLy7kK5WFDwjRqQGJs5Dl0UNxsY62AMPTj8RkmTweJE7Z+UPNcGyHCaagk1wCenZ\n2jirKJQiQARS9d2PBnKZNopfPvApUYQkVZL0zgvs+i1t+OQEy3KYqEU0wWh6R0e21TikXK10BKZS\nTJmyqTgQrpWBj8UvjpdawdqyKVku5d0YBLEcKoW5JkipNLzcpWVlRTNviFAe38Y6xgOfEtUiNDh4\n6GwUm9IRTIshUlx3vSiu8ix8sImHLrmd9+kJBD4a8WscDfgoN96RE8Ry6Lhpi6gha52xeofVoBWp\nxXqIqzaiwHpjqU1ZVs2NRRDLYeLThHRgQ37HaoyIsmBmQql+bq6rUYMZWF8IA5f8weYZGyscHnyW\nQ8WgkhU7FlfirC/gQEvqFUGcFaCp+50FeUKeZeATctdjZYMdQMGyHCamipoqVkAlpRDK1XNeik2Q\ntAWaIL76ojyKDXa8B4bCxpyRBLEcIiJVoIZKtGTSsjqeS1Ew9DG2hdpe6Y4sg0g26nEf+KT4DkQb\nmSCWw0RqYOpYjQGTV++s5iEmkqDpIpq4RVQEzUsxA4H7RQbEcmOKZhDLYWKqYBooWbVMJpirhGgu\nljbtUI5shqShwP2iKMaLpQ7e4VziqzWwFSaI5RDRqI6aOmgWN1ME4z2XruHuSh5YzlNqwS+iBokf\nU3nJqY1yqAcehMzbHhmIpHyhlSV7PfwEsRwiGo1ANAIa4WrDU38cGb8caVYvvnKIWOjdRrvXgG7e\n41oy8d6YM6rAJ0GgVkmpVVzQUkuX2dU4pleLIJZDREzdrXMTFZalXzFsad3hyqGCTVrYZBbRBPI+\nm4HA/VONLdV4Yx81QSyHSFRpInETqdRKSUMW8onvCrvGVdw4tIP0ZyBtM7iqYzArA/eBQi1WL5ZL\nj+WN03U/iOUQMXETqWwuLTlLsVqZrvxBlcXiDT3oXUV7tzdsqVrgQXH+9loM1YqXRykuuRtDJh1B\nLIeIRCMQjaHRKKrGW3aQL1+WTcdXbkSAEEkf6V0m7V5HpV9yz28cqyDw4Kgq1UioxT7LI+ukJbCR\njqEglkNEqWMq2zGVbWiWa4kA1jdqGf7HrXf9N1jJK6Ro6wxp6wzowkAzjXV/mA+0wLvX32v8J/+z\nlIuzhiYAapWRWoVmo4azKQsjYCMRasOHSgTxJoi3YLUK4tZZzrsPLdNa3SpFMofrjVF6vex29zrS\nveQi47VxUoTIP2hdCWZZHNVZ64qF8lIefseyx2FtvccsCyFLK/M+QEn9BXV1G+sOFi24T65RjWhU\nowGPtzvolqYRPbwEy3KoCFQ2o/EWMHWXFI7m03Fr7NCvxtn0XhDfFs54UfYC4lM7bHIT7U6Tdq+5\nqLhmgrOerIPCYsw+R0Vc85I84aBkXcuq24z3+NH8o8/X/tRMpNZeKk4kwkhVaFQUKR0vG0MiC4Jl\nOVQMUWULtjJGWmmgiRNKUZOfJMMufnQXdsl9pG6S1HcWo5/aWbGoLpL0LkJnmnj0SdBRxETr6Ih3\nJ2mWY6BYVDJb0ldLld9M5k8rr6axUkP9WLJ+p6Ck3iqO3AxEbLHPmhixUomhUYXIlKYia0vPV4Qg\nlkNEEExlDKlshWqTNFUi9U5xscty6CuCFSEVJVJFrHudXDzVTcpTI1xdvIZZuMy2zfNsqo6
uK9d8\nWSqzLc4OUyok1OliMCXLvZCatXZe+7AbTur7ICmiEalUSURcstkaGbSgNKqWRs1f6AXy9VLWy8Ez\nJIJYDhFFwGxB649AYxdJ+1TulQJdphNAEFUiSRFRrAiJxKhAZBU1EQvU+Vlngtduwqb0Ol+qXeHz\nu3dSX47hLBf+/HTWufEpUVBB+OazNZ7cHWGkcs+HrxHtAQrhd86ZyLU5VeXagvDOeeX4+ZS+FsK/\nmt1+VFO2jiZMbIoxJf92Fo/KL0kbQDiDWA6dCKlMQjyFpQGymMVYludiLIpRkNRNQ1NxE3FjU6wx\n3KDGsf4Y312c4mRvlPHbs+ydv84zSZd63Fxj09N7o0v+doIJiPL0vjpP7RPMOmkhZrPgVB5VdgfH\nsekOl2f6HE/tqkcTMoFWa2nWhU11JxXZ8ZIHFFdrgKtAEMuh4tTQVCcxlR2YaBOazvsG5ctj21ix\niAjGGvDT0EiVvtS5aOu83d/Ma+0JjvQ305WYfnuG6dnLXJ2/wfh4c50d7GWbrEiSikv3rYfT1xWc\nglE3E8hGPjefcHvB0l+GFLMHRdUyWo/YNFIpb3S/hXVzsR0Ga+dbeQjIoq+muoWoOoWpbPVpPfZT\nTMGz0JBPbVcQtRRRU/fEmd8SSWlLjRPpOD/o7OI7izv4dXeCjlRRI7SSNhdmL3D+1jmspO4V1PW4\ntP5H12CuX9FJsZgIFjFxwx0BnjVN0a++XLYws2C5tZiiZmXfR55BUOq35qxfSyTKaN0wUotYWuS4\nfj7v4RAsyyGjKBLXkeoUUp1Ce0cQ1Txq/aDPWjjWKR2jilHjouFORelEVd7tTvCL9lbe6I1xwY7Q\nlwrGJzxrBBfmLnHm9nn69mWM1H0gSH3K0dKFJ9YO5XGJ+FsK6++an1lmtrRFuLkItxbtgx8mn3pM\nPuFcrFvUDqVRhc0NqEalT3+95eYOiSCWQ0RUgAikDs1DSOMQOvcTsAl5bfgnNDGLKLA4K1UyD5ei\nYlBiBEhFuGTrvL24nb+e3810WqVlIqyJ/IFtULVYgav9WV67epQv3/4ch8YPYIicXeaTjMuZdGvl\npJA7zlAXT14zA3wA1E/IDTDTSjlzw3JrwYAxTjBXUDWz+gW34qdFiFAVJrYYtm/2zX8HH7FiY1sr\nfOwlWUTqIvIrEXlHRI6KyP/stz8qIr8UkVMi8n+JW4AGEan526f8/fuX9y2sJVy+n1JF6tsx9V2I\nreKE7sFmtVmyOT46mvUPzMQtFcMiNU4mm/lRe4rvtKY4bUdpmSoun88lElsxXlwhjSznWpd5/+ZZ\nFpM2Nu9rWQxyFfp+bBgKR4K35FS4djvhyqzFSuS+pxUVyuLLVrGIWudPtbC5GTPWjMKhwP3NX7rA\n11T1s8BzwLdE5BXgfwX+pao+BtwC/tDv/4fALb/9X/r9NgiZHBqojiH1PUi0ySVPywMu5aBZlY6z\nKi2CJUKswSDMa4Wj/c18r7WD77d28H6ymb6JUImwEjs/p2QRVyFWiCRlPpnn+MxpLs1fAVJfAVP2\npQWWC1XFYsnquRILl24lXJuzYFbbpeDj86oIKWNNw3gzRh7oUv9w8bHfjDoW/M2K/1Hga8D/7bf/\nGfCP/N+/7W/j7/+6bKhl4TJ/VA3qu9Dqdr+WuM1Kaj7Z0+VVKoqo9daIIZWYRRq81dnK9xa28Xfd\nSc7qFrrG+Sdza8FHW90tl5sYWcWIcmLmDO9f/5BWfwHxFTG5z3KNBXgeNtyl02KB2XbK9IxloZO5\nGlb3w1fvLmrEKVtHLFtHo1Uby1rivi5jIhKJyK+Ba8D3gQ+B26qa+F2mgd3+793ABQB//ywwMcxB\nr22y+GyMaexGG3tIter9T5/8BHB+JOeTFBWMCqmJuSF13u6P852FXbze2c5VO0LfRFh/shUr/ziZ\nNKq+XtxtMSpcmL/CkRsnOTd7iRQXGS+3ogjWxPJQ9v4pMD3TZ3omRamQZ1+u4Edf+MIVY93F2GIZ\nG7Vsayrjo3GYaXCfAR5VTYHnRGQM+PfAE5/2hUXk28C3Afbt2/dpn27NkDnt0YiovhvZ/BnswnvQ\nv+ybELizoGhrWYRwluasaX4ry2avMEedY73NvNYZ4+3uOOeSMayJQCyxtagYUnElltZbisaCUUvf\nWKwokXW+1YV0kVcv/5qR6gj1qMFjY4+46Xv+LqToW3wXwgl0L7zDpUhH9IGT7JZPqle4MZ/y1pku\nR89bDCOoTe8SzFr+8eYXeY2xYkhsi93bYM+EsKnuLr0b/fv+RA4SVb0N/Aj4AjAmIpnY7gEu+r8v\nAnsB/P1bgJt3ea4/UdWXVPWlycnJBxz+2iPvKgMoVWjswdR3gEZ+EnyvQ26ww2R2+Po4OCBc1hF+\n1Z3gewtT/Kw9xdl0M9YAYp3ViRNGF2RyZOeoC3QL+VITOPfY7f4c7904yZHrJ5jtzuZdZYJN+Wkp\nXRj94nXljqOCC6Ccu9bj9HXLrZY694kP7uiKSlPW0s9XfwEVYxlvRmwdrWS1Uhue+4mGT3qLEhFp\nAN8EjuNE83f8bn8A/JX/+6/9bfz9P9TBBnkPOeW3apDGfmjsR6n7VmpLah5d9MbFRfPzy30tKi4M\nkJiI6zLCTzrb+c7iFG90J7imTdL8WiXFCean4W6qXRJLwU/jy/0SFWssF+Yv8ubVY3xw6xxWrUtS\nv493F/goyn7HrAVbtskJ5q3FhJOX+5y9ltKzJs+7XK1MhCLbwrKlAZObYNuWGHkQX/tDyP1Mw3cC\nfyYiEe4s/ktV/U8icgz4CxH5X4C3gT/1+/8p8G9F5BQwA/yTZRj3GqWc0O3CKVHjEWzjIFoZh95i\nqQUX/oQobMdyjqPTUKETVbiqDd7tjvP9xR1MJyO0JcKKYMX4rkKDZ9fdz7PBaVSpJojFtM3Rm6fZ\nMbqDJyYOMho37zkVDDbGfaLFBSlLEXK2v/uGe1b48FqPk5f73JoDkShPBC/PKVYMKarUU5uybSxi\n+xbDlhGDYL29ubH5WLFU1XeB5++y/TTwubts7wC/O5TRrTtyj6Vbr1sFU92JjDyONPZie9MId1mr\nW12bNZfj5ixMUaFtRjiTjPJmd4zXW2OcSjaTmBhD4k8641/xQdq/OWsUtRijXO3c5I2rx/iNvS/y\n1MQBqpl1Ww42bKSkhk9J0SRDsZK5QMD5n5Vr8ynHLvY4ddXSTWuID84Vizas8Ged9UJVBVImNwtT\nmyNqEfj2QhueUMEzTMT6WExWuy1Y2YIZex67cIykfYYouQJaLDfhH+hF0rUeS8Vww1T5bmcXr7fG\nOdMb5bbW6cYNXNKkIdKUWBNs5p98wINZEIyClT4nZk/xH07+LYn9ezyz7RA1qXmLQllL1Txrn3K0\nu5h/iyhWLV1r+NnxFr840efKfBVM1S2P4ZfGMFkjjRX6wJ09GTk3kcLkJuXgpOHR7XVcv99gVUIQ\nyyGTFSJKflNUiKo70OZhdOQAdvYmprReTBHvNi
gRfapctVXeSjbxnc4U08kIfYkRiYg0gdJULp+y\nSfmZ7h/j3QaFBZTw7rX3mahtZnN1lANb9hIjuUGZpwCW327gLujAb/cpWxRLIsLl2wlHzqVcuR2D\nxOTHjAub59/JSg9ZfZXYri3C7jFhc8OAal5NtNG/7iCWQ6WwFLOJlEFRM4oZfQyaB9H5Y6C9XKLA\n1+WKYVGqnEubvN3Zwi87E5xJx0lFMMZNhSOS3P9lJZv0P9iBnAmfekvWkgIpVzvXeP3yu2ytjzMS\nN9nVnCwWOMgj5eHk+XjKQRFXdmoRFnpwYrrPh1eUblL1a3CnLs3Lu19WNhJeGqsIalN2jgm7xmMq\nkS3Nf8K3HcRyqBRBkSyPzh2IFeLGI/Sah6H+OrRuu+CMpL55hWGWBu8nTX7eHeft9gSX0i1YU8Wo\na8Lh/Jw2fx31vk/FJ5w/6Hi98GaO1ISE8wuX+dmFNxmtjlDZ/VmmGlvz+PnAYwdTBwMZAznlRZCn\nZyPOXGnz9oddZhYrIDFCCoiPOK+WqyNzG0UYEnaNCzvGq74BcPhyM4JYLgMDV+KsW0tlArPpKezo\nIWzrA5fKIxaViLY2eL27lZ93t/BOf5xrdhQ1BpG+e4osP1Jc2Vlu2ZUznx+YrA44s1Shoz1Ozp2h\nfr5CvVrl63tfoSox5ep2U36PgQGyybfmcRolQbgw0+ONUx2OXxSsVPzso/AHDnyaK/nRZseRWjY3\nlN1bK2weicmPrfA1A0Esh8od05XSn1ZHiDa/iN06TTrzC4y9SZsRzqZN3uiO8R8Xd3MtbaJiQBLU\n9FGqFCnqMChRw5kcZbaDajHhUlHm7QK/uvYO1zozjMQNnpt8gtFK0z8mq0L6uHSlDYqAVdecRKzQ\nTQ3vX23z/XcX+dVJmFmoIybzBS/5Hlfhg3Q+0ghDl4O7DI/vrRObwgO/agNbYwSxXCFUBI3GMZue\nQkce4fZcwulkhFe7m/lVfwuX7CiRuCh3IpBSI0qriKQuSrrM2Rv5miulCp6eJJxdmOb7Z3+GMYZn\nJh5nc6VZBCB0oCBzVRfWWisU9RfqnSWGszd6vPp+h3fPwa3FKsjqN6YYrBNxfQdGa8ruCWFr07f2\nQ32xQ5BKCGK5Yoi6E8c09jAz8lnenUn55YLwa7uJK1p3J5Cmru8kFYyNiNSivkxuZbuBu9CUiJLQ\n463r77iEeVWe9YJZNAYJ7n9YKj6FlT7bVV79oMMbH1quzjo/ZeFjXhufmiiQJkxtidg3EVGLMn/0\nKkXm1yhBLFcILz9oPMGNxud5R3r8snuR23EV1/PbkESZH9KAWtR0/WPjFYuQSr6AlktRMgg3Ozd4\n89p7pKnStwnPbnuc8eoWvz9F7XBIKypZ5obZruW96S6//KDHpVsxlggkxajJU89XCxEZEPh63Gff\ntoj9k7HLsSi7CPTu1VwbjSCWK4g7QerEo8+gzev0by8CHSJVFxPN64YtGMWiiMaoRvkzrBw+8q4R\nKsr1zm3evH6Enk3oJl2+vOdl6qbOoH0ZAPd53Ooo713o8uMji1y4GdG3MRifxyC6Yhe/e46xJJSq\nlokt8MiksHtrXMp8yCzk8A1DEMsVRXwu26GJJ/iNTpsb/S6vXzlKT7qoWIwvOUsFUIPRmKVu9pVB\nkbwCyQBVEMut/iyvXn6Ds7enme0s8OLOp9m7aYoqFVSifJRZDudAezcpUo3WjWuz5L8dWCtOs8V4\nU39J8ZmoCq3Ecvpaws9PdHn9VI9LM0JPqyDGW2iGVLIuoyv5Xly3fgCjxq+5kyJGGal2eebRCi8c\nqLF1ZKk/NQhmRqhjWiHcyeEm47WozmMTj/Lk1sfYVt+KppAJomKcaGpW1ePuW0mrshhr8drZtlQs\nFxev8N3TP+UHZ37O+zc/pJW2XPQ3W0Eyzy0sre6dt9xZnx4wV3+TvRvrpVJ9lyYLKfRTOHqxyw/e\nbfOL91Omb8b0tU7mqCiEduVF587jyHUYUttlx7hyYLuwcyzOBUHIxrmxBbJMsCxXiEJy3P8TI1v5\nzPbH+XD2Ire6M3S07RKT82YL+K7n8inWHH9QlgZtBgMSNrKcnDvDQn+RmfYsX9zzPE9OHGSyvhV8\nMr5PDfWPsiXbRPLfa52ytBTfgebbVV05ICos9ixnb/b47tsdjk4rN+YrqMTFkr3Z85T+X1Eki87j\nZT9FgZGKcnAq4tCOCpvqmVXpR7lupgArQxDLlcJHj7Nu6VWpcGBsH89ue5zp2fN8uHiWTCRFi5PT\ndSNaDcH8aHr0ON+6zNUEsMMAACAASURBVMKlLtfas1xeuMnXH/kcE/UxIh+wKkId7jc4AV0PQglL\nChZ9+zTN1mjHSUqKMtPqcOJKwhsnU946rcx2jZt2r6Gpa7lRu5VsPShhaizisamY3ROVNTLStUsQ\nyxWiXP4mKEYNE7Uxnt3+OOfmznF64TyJKU7CCHyzYCn8fmsEl1ViUFFu9mZ4+0aHG+1bGIEXpg6z\nZ9NO6lEjn8YNvPe1lTVzH/gLnJo85xCc1rTTNufmLvL25Yu8dTrm9PmdzHe3lS4GgxHn1bXU3FGl\npIUz2VoemzIc2lllSz3CBEvyIwliuUK4KpnsL3dQxmo4vHUfnQO/wXs3T3Nq/jxp7L1jVok1a2Cx\nasO+B+7Ei9QiYunZBT6YP8n/fvQaBy/s4ZnJJ/js9id4bGwP2/3UPFuV0pRTjdY4rr+jxarmQRlV\nSyvtcmH+Oj88+wveu3GEM7evMN/eTFx9imrtS5j+FKqN1R7+AIUDyCCkYC3bmylffLzGoZ213Ksa\n9PLeBLFcMfKVb1x4QNzJWJE4n47fbs9yI5kjjTK/0lqmFMjx5uLtZJZ3Zv7/9s41Ro7suu+/c6uq\nu2d63g9yniRn+N7lrvalfWjXq40etqMISgIoiAIj0QcFApJ8cJAPjoQAAQLki/MhtgMEcQQrgRMk\nkRPFjgUFgWNbEhInzkr70Eq73AeXS+7yPZwZ9rx6+lV18uHe6q6eGZJDLnemZ3l/xLCrq6urTlVX\n//vee849p8yltXnOlS7x0PBhnp9+gv09o+RMDpxg7h0EFeM+i4RKUuPq6hxvLbzHz+bP8n8uv8Ji\ndcHWhM+XaQQVTJzDVB5E6lNI3IPo7s/WAeuSSkuXKBAGDWbHhMNjEblQUfbK4Mju4cVyh8j6lkkT\n6roSE/1hL4/ve5Cry9dYm3+LchKT1vtOiz900o0sqUfYOaHSPO2C0KDBlfVr3KiU+GD5EqXqGieH\nZzg8NM3+nmG6pABEqGa6tJnxzJuPaGr745bTPzNrZGO4VdZdldku203euA97ltSJubB8mTML73N6\n8Sxv3TjL+eWL3GgsI8Y4w2M0XKDe8zImXMVU1girM9AYQpLI7kltOFZajnjzkTOxVnfRxGuesW5x\nDSUBVYwKDY3ZN5hwYipkpC9NzrLp1D0b8GK5w2SDMdIxvVDggZFZLq9c5Vp5nvfKl0gC5+lRzXxv\nOuRud
n6LloffYjNtK4mBCjXOr11m7XyFM6XzHF2Y5tjAFLODBzk4eJBIomxATWu/7iufmUDSihBw\nr6azS7I1ztMNt5ba9GfHGq2bXsu+2/48JSjVpMp8eZH3Fy/y0tXTvLP0AWdXLlKqlUBiAqQl+u6w\ncXSdxDQIgjKYVUzlOEFtMjOckjVA3HVsxbU2139I31BaxtjuwwplOn7aE8UcnzA8OJ0jMvZ4HXJn\ndTReLHcMad78LYlwKxT294zw6MQDXC7PUbq4wmJ9iWZ1AdXOiuTOKJlo61wErOceMKoYSbgez7Ow\nUOLs8gecXtjPbO80D489yMHBcfZ3D9OXKxJq4BxZ7RGeWX90WytMN0YMtl5r+zHKquKmwd/N70aV\nhiTMV5e5vHydC0tX+GDpEu8vX+T0/LuU4hXqJkYC5x3fqgyjxGiwSFyooMESQbCMEGHqA6CRszHB\n9i4Cl8atYesvJXZdOsJ4Nyg2lELUZM5ZEU3luMHkUMJDUzkOjeaQ5j/P7fBiuQtsimIUJVTDyaFZ\noiMhBYn40cX/x9XGjdY7OicKhVtFC6bfz3TqZiJKzdS4GpeYLy3z2vV3+MHFFxnrHWWqZ5yp3v1M\nFEc5NniQfcVRevI9hK7Eb1Y6065667F1xKa0SHvhtmx6MVHZsN6OQ5Yb69xYX+La6gJXyotcKc/z\n9uI5rqxcY668yGpjnSRQhBiRGJu0XogRRMzWQiMxSbCEBqtodAXCOYL1B5HKUUxjCFWT7oHU+WW9\n7aapkXfzUbciDRSVhhtLtvsGA0mD6VHlhVN5njnWTX9XiIpiNEGa91jH3GQdhxfLDkEQIkIO9U/x\n9OSjXFtf4NqVHzdzRnaUVt4CBRIDqLhJgEqgALEVuhBKyRKLN5Y4s3SBvlwfY8URjvRPsb9rlJGu\nIYa6Bhjs7mO8d4Ri1EWOoClKIqkoNLNv0iaY7v/EKbZCUyhrxKzVKyxVVymtL1NaX+bS6jWuVxaZ\nKy8yt77ItfIC8+UFlAZGgDA9SqtOji3rIRi1xd62HjsN7LUIVom73kDNGoFZh8pJpD7i5vvXQY0r\nJWHn4DcF/06LhzeviQtXEucgVGmuH+iKOTUdcOpgxFCPIWhdrj1zf+0mXiw7BbE3dHdY4NjIYZ6u\n3ODni2eYr5RAjCs7sDdo9tLVBqq4EkKkc+MBJBTq1JivL7CwsMDZxXMM5nsZ6R5ksNDPQFcf+7tG\n6It66MkV6cl10xMV6c0VyYUR+TAiMgGhBATGtooSTagnMbWkQblWpRrXqTRqrDfKrNXXKFWWWa2X\nWa6XKdVWKFWWubxynVJlidV6mYZJbLKLUNMy2lYMJa1ThP2cVAiTzLne9CrYSIEkWEEL74GpoMEq\nwfqDaHXG/aq0WtBtAw53pVzZ1qGgmrhhEkW0zpF9CY8ezDE1FGI2jYenEwY8N8OLZUeQDmbaL+lg\nvp9H9j/AqaGjvHztNKvx+p65iwXFpAmLNnTp0g60LfmbtLrDAVSlwtX6OtduXMUkkDM5BgoDdOd6\n6I66KeaL9ITd9IZFckFIZEIiY2zCZLFVCRNVYk2oxzGVRo163KAa11hLyqzWV1mqLlOuVyjXy6zX\n12loTBIaK4ARrjtKq1UntjRE05EltpUaKASJi+u2p7QFzqmktsaRBKuQP4sGK6hZI9Q8Uh9Bkrxt\nXTYdP5nQqjv+zLNj2+ImNSQYbVCMajwyk+PEZEQxyrRa0+qN4kcub4cXy47CtkYCYLI4ynPTn2Sp\nusbpG2eoSt3Vlk63TNshNx8/3BXaxs1cRsy2rmorX6aVmkw/UG1cYxwKFRKuVBfRyqLtViYQIORN\nRBgEBEFIIAYjxs2/tt3POEmIk5hao06cNOxzSYiNIsbYbryo9aOk8qBZ29IOqa2yKK6JabIeJdIM\nPmlT7BZXX0BVUYnRoIrKJZQGknRhKscwtQNIow9QRBLIZJpqHW9jX3+L47mWYuv1NB9pg8jUODCq\nPDyTY6Q3cBNR7b0kze5+x9xBHYsXy47DdshyEvD8gU/Smyvy38/+kFfnTrOWVF24TIKRVkKHTmoR\nqGwIKUp7lc0QQLUJQqzrHCGwi9oSqPRt4pI/iNj+sIoNSXJDoJn9b3EFUscygLrxuaxNslGQWka2\nTS91OmUy9qfjsuk53czRZcdL02a2NSqRBI2uo70/xBTeIVh/kKByElMbQ5ICkil3bMcEbOvbXlHj\nWqGbD6rOfm2GJymGhJxUOT4Jn3+sm2MTXUTpfpq9mc66fzoZL5YdQ6srDoAqxbDAgyNHWK2tsV6t\n8MrCWwhJaxvIZCrqjBu+lVWItuG3rAC1Xsu0NyWNN9wwNrvpy7zhqy2bFhy6YZuNQwLaPG5rnWul\nbxoe3jw/vyWUtxu1zHSL05QiEpMEy6ipg1mHYAk1DxKsn0RJA9izUQA21EhvN1FUW24uAE3qjA0r\nj8/mOTVZIEI2zU+XtskBnlvhxbLTyHwJA4TBQi+P7j/JeqPKB6uXma8uuLEycWOcnXer386Sm71+\n6/fd6fnd+d62fsfWvu7t2rBBmtxjgpoEdJ04dxHMKmpWES0itXEk6XJinLjmqQv9cQ4j2cJTnnr8\n0/tBgZFe5ZHZHI/NdjHWF5E6kjaa1Dl3Tmezlybq3kfY29cghBjGiqM8Of4Qnxg5xkDU48bAsonC\n/O2+11DsuGcideLwOnHX6zSKLxIX3iEJSqQhRKKBnV+uih172DxvCdLhDzvma8PQ6pw6IDx1rIuD\nIxE52e1CFnsf37LsEKRtSbFSaYOFBZjs2c8Lh56mHFf56eJbrMSrdts0UYP/Juwh0jFDm608EUVl\nGe36KcasEJg1qB7G1AeRpAvSqAFNu/Jbf9hWRgXRBuMDMb9wosCx8TyFyPiu9j3Ai2UH0D7SlF1j\nmt2kEOGZqScY7h5m4vz/5c8vvcKVtTkabnac/xLsJZwXTBKa45Hi5pWHSyT58yTVGYL1BwjXHrGC\nqREZj9UW+1QMMYGpcWQs4fMPF3j6SIF8lCZk8W6cD4sXy05DyHSvLWkChC4ijg0cQA8kaAx/fvkV\nLtautrZthha5LldblS1P52DzgNpPKWhFg0uCUiWOrtsyyKaMibsx1RkkHmy+VzfOH3ce+4AG00MJ\nz5zI8chsjlzo4gs0c0/5e+Gu8WLZgWxsaaZ+URFDd9DFsaEZYrVB2Msf/IiVpAzGJdhVbUacNGeF\n6JZz8jy7hpMubTlu0GzWIUjMGo38e9CTIzDrBNVjSNyPaECiQXNGVDPISGMmBxOeOZrjqcNdjPdH\n2bgE//HfA7xYdghbC2T7i+qCr4tBgePDB1ESPli+yNuls6wm69ZTimZiGnfMfM8dYAPo3ZMtJ2Un\ntpUZ1Gl0nbGtzGCZoHoU6uNInLPpKV24lSpMDiQ8eTTkU8cKHBiKyDVDrnz3+17hxbKDuNVNnXo7\nRe1skh7Jc2LoEJ+bfQ5zzvBW6T2W4xU3K8O46opJWyiSp1PITC
2UpLnO4rzeEtt2Y7AMhXfBLNuU\nb5UHiconECIShZiEYgRPHQ149mSOmX058sGG4/iP/p7gxbLjyUxjywRgBxrQZ4r80sxzHB44wP++\n+BP+7NJLnC9fpkEDk6Qxd4JPu7WHSOMq3TimSkwclEnyF4ijOUz+bTCfJqgcJagPMNUX8tSRkL/6\ndBcjvaEbzWwvlOa5N3ix3DO0/JkqAsbOie7GcHzgAJExRFGe/3XpJ5y58a7tubuKhF4q9xianY7Y\nsF1tSdCgSpK/QqI/Jh+sMhGd4ukDkzx/osBIT+DGKF0mI/8Dec/ZtliKSAC8BFxS1S+KyAzwHWAY\neBn4m6paE5E88O+Ax4EF4K+r6vl7bvl9RCtGLk1OAbZGj+2W50zEzMA0QZijJyzw++srXC7P05CN\ne/B0PAI2+DwVzFY33QadJ4T5D5jo7+FT+w/zCwfyHBrNYZN9iEsU4oXyo+BOZvD8KvBm5vmvA7+h\nqkeAG8DX3PqvATfc+t9w23nukswscJt/UDPzp0VTtzeRhBzqmeDTE4/zwvTTHOybIjIBepMZH57O\nRomxopmmtAOICZKEowP7+ezRGV44McLsWEQUqUvH5ifkfZRs6+qKyBTwl4Dfcc8F+AzwXbfJ7wJ/\nxS3/Zfcc9/pnxf/UfQhsbZq08mEaQ2nTKjQ3wQCRGMaL+/jM7LN8buZZTg7M0GMKkLjcirSKVtmE\nO2neosRlx2mt2ZCRzHMPSVNjZK+4HV9OnxkEgwFMEpLW5YkSmO4e5pdnfoG/cOApDg9OkQ9DAoNN\n5us/sY+U7XbDfxP4NaDXPR8GSqracM8vApNueRK4AKCqDRFZctvPZ3coIl8Hvg5w4MCBu7X/vqAl\nkdl5GO2P6e9RIobD/QfoCvIM5Xv4swsvcXrxLAu1ZdRoMzNZMzEDavMbChv26/lIyTq/aSW/UDGo\nSzAsJHasUgMK5DjYM8rTE5/g+emnGCsOEzYzU3qP905wW7EUkS8Cc6r6soi8cK8OrKrfAr4F8MQT\nT/ifxJvQDEiHLb3assGBY1TJEzLTM8lEcR8PjZ7klaun+YMzf8KFtUtUtOJ2kyBiZ48k0MxqblQJ\nE5ezUVrrPfcOUSVQbWaPSsQ0P1slQSRGXUfcaMjBrv18ct8pnp1+hAdGjzCSH2z+aPpPZ+fYTsvy\nWeBLIvIFoAD0Ab8FDIhI6FqXU8Alt/0lYBq4KCIh0I919Hh2mJxEHOydpBgVqTWq/PjKa7xdOseN\nhi2za7vehgRDWhfREG/IaOS596iLr7RZgtQ5c4wmmLTYmIBoxAMDR3lm7GGemjjFocFpimHXbht/\n33LbMUtV/aaqTqnqIeArwA9U9VeAHwJfdpt9FfhDt/w99xz3+g/UB33tKCLS/Asx7CsM8plDT/Ol\no5/j0xNPMtM1TT6xSWYhRomxtVps+t26ERrG1o7x3HtUhIa42jyo+9GKgQZCgiRCF0WO9s3wpcPP\n84szz3BieJbesJsgnUvu2XE+TJzlPwS+IyL/FHgV+LZb/23g34vIu8AiVmA9u4AtyWDbjKPdozwx\n0cto1xAT3aP8ZO7nvLb4JjXqYBJQJVA746MhPnv2R4/gqmWQkBCniX0Tw3A0zLHBWT45eYpnpx9l\nOD/o3D1pAJFd8uwsdySWqvoj4Edu+T3gyS22qQB/7R7Y5rkLsoEHmvGwCgG9YTcnhw8zWOhjf/8o\n5qzh3dIH3Kgvk5gEJEYSQ5AY36r8SBGCRDCqzoljR6WLWmS6d4yHR0/yxPgpjo8cYjA/iDQrCPkE\nvruJn8HzMcZGZIaAErpRydAYpnr3M9G7j+ODs7w+d4ZXr7/J6wvvcGX9GjVTs04Gl4RBbYxRxlnu\n2jWaDZbGpZZrT2H8sWeLYmctz7Ydh5TUq+0mFKB2eEQloeEKjOUkz3T3GL80/RyPjZ1kdnCK3qjb\nBpmn8/ztQdz/98XV7Ti8WH6MEdedbpWiTduY9u9g/yT9hT4mBvZzoH+c1+ff4ZVrb7Aal9Gg9eVv\nNVYzSb8k3V9r/f3EzWvEZetTuvRrZMqApF5vhUADRgpDHB08xEMjx/nMgacYL46Ql3BTiJgV2o/w\nhDy3xYvlx5x0pKst9EjTr7KhP9/LqdFjjBdHOdI/TX9Y5M3Fc1xav0Y5qWCMfUMiWX0UklSIbZPy\nvm3rNPVrqx8UTVyL26C0fnw0UYZzA8z2TXFq+Ain9h3j8OBB9nWPEGJcriCXSMPtq00679eLvct4\nsfwY0x5nLu0vKG7qpBBIyL7uYYYKvfTlujkyf5Y3Smd5Z/Ecc+V51rUGrmWUVnNJXCkEo67Dqa5O\njMjGo3+MaSXfRdO527Zykg3Iit3s7sDOylEoSMD+niGemXicU0NHODF8iNHiMJHJu2TADqeQbVfx\nfrikHYwXy/sW+0U3mmqnISd5Hth3jMn+cY4tH+L0/LucufE+r1x9m8XaEg2po2JF0QAkZITy/uwl\ntlp99npqmlotbY0rkChdkmN/9zBHBqc5OTTDp6aeYLw4SiHI2znd6kqRuaakts3W8nQCXizvY2JR\nRGzIkK3fYzAYhvNDDI0M8InRk5SqK7x65TTvLL7PmaX3Obd8kYXKInVqqCTEkqbbFIwGrnV0f/TL\nFbFtR2nN9A4UTCIkKoQSMZQf4InRBzg8cICjwzMc6p9kuDBAJGFzdNO6gpSEhMTg5oXfBxdwj+HF\n8j6mOT05TZ0oaWUXwAnnUH6Q56Yf5/jwQc6VLvJu6QPOLV3i59ffYaFyg6rUSdxdZB1J90f7Umid\nbyyQuJIf2hC6TTdjvcMc7Jtgtm+KJ8c+wWT/OH2FPgICsrP801Zp1vnj6Uy8WN7HmGYyDZrKKc1S\ngK3sRF0mz4GeccZ7RjgxMsPFpWvM9E5xfvkSH6xe5XJ5jpXGmi1jIerUd2vRvNU0yva1N3v/zQWl\nLTfFNo+48SjbsSEl9XWTWM92T1jk+OgM073jHOobZ3ZgkgP94+zvHkXUkLguehqUlRaXsGOdree+\n+92ZeLG8T2lmXceNWaZiuaW4CEhADsNY1yj7ukaY7p/g4tJV3l+6zPurV7iweoW3F95jqbpCQxvN\n3LUJzUFR+59sPkLWlmxyOMk8bmeu+s2kTTL73CyQG+QxY2e6QtOBRNLWpI2f1AS6wy6GuweZ6Blj\numeMh4aOcWBggrHeEXqjIkHGs50K5KajSmufXio7Fy+W9yupR7wZ6tISk80e2EyYEDa74mjXMMNd\ngxwbneF6eZELS1d4o3+ay6tzXCvPM7e2QKmywho1CGyLVVIPvOBUKePuVTJTnjcIeeb/rSSxJWXQ\ntsNNJ9w6y6Ymuv2mPxhNWW46rdyjCBorJhEKJs9goZfZgUnGiqNMFPcx1TPOdP8E033jRBJixDTF\nL/0BaIYEbbJ9i2B+r5gdhxfL+xnJPmwILdrctmyimmDnBhn6TJG+YpGZ4iTPTD7CamOd+fINrq7O\nc31tkbNLF5grL7JQv
cFiuUSpssx6bNPEiUmly5Z+zUpimt44SWfC4LzwmjRFNSvvm1qemQFZlUyb\nzoU42RR1mazzLr4RbJnaJIEAQ3dUYCDfy1TfOCOFQfYVhthXHGJf9zCHh6YZyPeQkxyB2oS9kgaj\nagJiMtd2G0MPXiA7Gi+WnruiFRGYtjoDQgL6oxx9/X3M9k9TS+pcWb7K3OoC1ys3uL5+g+vVEhfX\n5liuLLNSWWWlskq5sU4dcK7kNKLbxYHaVGappiW0GqXZFmj2MX3SbLFpkrFXXcxoQqJqk3YmhkAN\neclRzHczWhymp9BLf66H4UIfo/l+DvaOM9o9xEhxiL58L5GJkOaUxmxAZMYKzXTnPXseL5aeu0Dc\nrBTaGk2t9p1tZXWbkNmBgxwamKauDdZqZRbWS1xcuUJpfYVSdYVSdZVSfZX5SonV+hpr9TXW6+tU\nqhUq9QqJumzhYuMWbeHDjQKUMSLtX6udd23rFlkNDlQITEgURhSiAl35Ar35fophkd6oSE/UTX+u\nyFhxmP58LwP5Pka6+xnq6qMnKrqAHtM8U1umg1ZwZNMWtlj27HW8WHruinavdDqTJTv6mfp1bQhS\nTkJy+TyD+X4OD0xR1wbleoVSZZVSdZX58g1W6+us1ddZq61TrpVZra9RSar2r1GlmtSoJXWSJCHW\nxGZV0lYAt6GVxzMwhsAEhEFAzkQUTJ5iUKRgchSCPF25AsVcN335XopRN71RN/35Iv2FIsOFfiIi\new62XOKGuEdpnu9GObQzmLyT5uOIF0vPXbDlKGEbm2VTSUutCUJeQvK5PIO5fkBJsHUo60mD9XqF\ntWqZpeoq60mNSmzFshbXqCd14iSxLU6lKZhkhNI0xdIQupZk3uTpCbopBDm6ogI9+S66oy4KJmpz\nr4iAaMbts+UJZrzzkvXcZ0OmvFx+3PBi6bkLtvY042bzZN0ZqU/ZJiJuCVvb+2iNgeZMSDGfZyTf\nv8UxbmZNu/f81lvbLdsc1Ru3bhNA6723mtzyZptMuzIbjNTub/eC+XHCi6XnztmywbVF/KCLsWzr\nlGaDOm+z8zvtzG41krmVrt9s1rWSDoemMY8ZwcucR5q6siXMrRalT8/78cWLpeeOuaWIbYiF2SKC\ncBsNrg8nkrd78WbhOpvXb2n9TQ641Y+F5+PEbQuWeTwej8eLpcfj8WyLjumG70a1XPEBwx6PZ5v4\nlqXH4/Fsg45oWcZxTKlU2rHjBUFAPp8nn8/v2DE9Hs/epiPEstFoNMXS2ApZJEnSfF1EMMagqm3r\nb4c0K+m1d/FzuRwi4sXS4/Fsm44QS1WlUqkQhiH5fJ4gCKhWq9TrdQCiKCIIbKH5JEloNBpt790o\nsM0pb+496faq2hTO7D48Ho/ndnSEWIIVxEKhQBRFzXWqShAETbEUEaIootFoEIZhU/BSsQQrmGEY\nNkUx3S4IAiqVCuvr696x4/F47piOEcu0NQitbnO6TkSarUJjDGEYksvlAJqtR7DCWK/XCcOw2QIN\nw7A1Zzgjql4wPR7PndAxYhnHMfV6HWMMxhjiOCaOY5IkwRjTbF2m6xqNRnM5Fb4kSYjjuG18s1ar\nbep6Z7vjHo/Hsx06TizTrnetViOOY4BmyxJagpeKaRzHbV33MAwxxlCtVqlWq00hNcbQaDS8SHo8\nnruiY8QSrBBu5XipVqvUajXAtgpFhGq12ny9Xq83u9q5XI4kSajX622tSo/H4/kwSCeIiYisAG/v\nth13wQgwv9tG3CHe5p1jL9q9F22GD2f3QVUdvd1GndKyfFtVn9htI+4UEXlpr9ntbd459qLde9Fm\n2Bm7/XRHj8fj2QZeLD0ej2cbdIpYfmu3DbhL9qLd3uadYy/avRdthh2wuyMcPB6Px9PpdErL0uPx\neDqaXRdLEfllEXlbRN4VkW/stj0pIvJvRGRORF7PrBsSkT8WkTPucdCtFxH5F+4cfiYij+2SzdMi\n8kMROS0ib4jIr+4Ruwsi8mMRec3Z/U/c+hkRedHZ93siknPr8+75u+71Q7tht7MlEJFXReT7e8jm\n8yLycxH5qYi85NZ1+j0yICLfFZG3RORNEXlmx21Op/7txh8QAGeBWSAHvAY8sJs2ZWx7HngMeD2z\n7p8B33DL3wB+3S1/Afgf2HpVTwMv7pLN48BjbrkXeAd4YA/YLUCPW46AF509/xn4ilv/28Dfcct/\nF/htt/wV4Pd28T75B8B/BL7vnu8Fm88DIxvWdfo98rvA33bLOWBgp23elQ8rcwGeAf4o8/ybwDd3\n06YN9h3aIJZvA+NueRwbHwrwr4G/sdV2u2z/HwKf30t2A93AK8BT2CDjcOO9AvwR8IxbDt12sgu2\nTgF/CnwG+L77cna0ze74W4llx94jQD9wbuP12mmbd7sbPglcyDy/6NZ1KvtV9Ypbvgrsd8sddx6u\nm/cotpXW8Xa77uxPgTngj7E9jpKqpvNfs7Y17XavLwHDO2sxAL8J/BqQZqQepvNtBlvk/H+KyMsi\n8nW3rpPvkRngOvBv3ZDH74hIkR22ebfFcs+i9ierI0MJRKQH+K/A31fV5exrnWq3qsaq+gi2tfYk\ncGKXTbolIvJFYE5VX95tW+6C51T1MeAvAn9PRJ7PvtiB90iIHRL7V6r6KLCG7XY32Qmbd1ssLwHT\nmedTbl2nck1ExgHc45xb3zHnISIRVij/g6r+vlvd8XanqGoJ+CG2CzsgIumU3KxtTbvd6/3Awg6b\n+izwJRE5D3wH2xX/LTrbZgBU9ZJ7nAP+APvj1Mn3yEXgoqq+6J5/FyueO2rzbovlT4CjzoOYww58\nf2+XbboV3wO+zWffEQAAATdJREFU6pa/ih0TTNf/LeeFexpYynQPdgwREeDbwJuq+s8zL3W63aMi\nMuCWu7DjrG9iRfPLbrONdqfn82XgB65lsWOo6jdVdUpVD2Hv2x+o6q/QwTYDiEhRRHrTZeAXgdfp\n4HtEVa8CF0TkuFv1WeD0jtu8GwPMGwZpv4D12p4F/tFu25Ox6z8BV4A69pfta9gxpj8FzgB/Agy5\nbQX4l+4cfg48sUs2P4ftivwM+Kn7+8IesPth4FVn9+vAP3brZ4EfA+8C/wXIu/UF9/xd9/rsLt8r\nL9Dyhne0zc6+19zfG+l3bg/cI48AL7l75L8Bgztts5/B4/F4PNtgt7vhHo/HsyfwYunxeDzbwIul\nx+PxbAMvlh6Px7MNvFh6PB7PNvBi6fF4PNvAi6XH4/FsAy+WHo/Hsw3+P5cgPlrAb514AAAAAElF\nTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Mp6cU7I0-r2h", + "colab_type": "text" + }, + "source": [ + "## Rotate\n", + "This operation rotates the given image by the angle (in radians) input by the user. " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "9kxUES9sM8Jl", + "colab_type": "code", + "outputId": "f79b075f-a204-45f3-c5fe-e80cd6ae20ee", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "rotate = tfa.image.rotate(google_img, np.pi/4)\n", + "_ = plt.imshow(rotate)" + ], + "execution_count": 7, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXucZGlZ5/l93vc9JyLyUre+0VY3\n0CB3Re1GYWFUVGBoUWAdXGXGEXWUGWVGHEdXUGdEZgcb5aO7+tnVxc+osJ+VcRkZUAQBERWUbrlI\nI01r3+iG5tLdVHdV3iLOOe/7PPvHeyIyKquqK6sqMyMiK779yc7IqMiME3He+J3nfa5iZsyZM2fO\nnIfHTfoA5syZM2cWmIvlnDlz5myDuVjOmTNnzjaYi+WcOXPmbIO5WM6ZM2fONpiL5Zw5c+Zsg10R\nSxF5voj8o4jcISKv2o3nmDNnzpy9RHY6z1JEPHAb8FzgXuAjwEvN7NM7+kRz5syZs4fshmX5DcAd\nZnaXmdXAfwNetAvPM2fOnDl7RtiFv3kU+NzYz/cCT3+4XxCReRnRFHDddddN+hDmzBgf+9jHJn0I\nO8GXzeyysz1oN8RyW4jIy4GXT+r555zMvOx1zvkiIpM+hAvlnu08aDfE8vPA1WM/X9XedxJm9kbg\njTC3LCfNXCjnXAhmth8E86zshs/yI8DjROQaESmB7wX+aBeeZ84OMBfKOTvBxbCOdtyyNLMoIv8W\neA/ggd8xs1t2+nnmXDgXwwKfs3fsdwtzx1OHzusg5tvwPWcazvvZMGD/fvT2LzMomB8zs6ed7UHz\nCp6LkFkQyiGzc6RzhszS+joX5mJ5kTFLC1nar7lizh6ztM62y1wsLyJmZgHbSd+yYs7Ioc/ZZGbW\n2zaZi+VFwsws3NZRaYAyF8xZZ2bW3TaYi+VFwEQXrDXbelhE842RUCra3mvtV96Ta/s1Z1bYL4I5\nF8t9ziQXapa6gnS2BxqMC6C2tx0O18rm8GHWiqXNBXOm2A+CORfLfcxkLUrQdnnlo3h4C9MTwPJj\nHQ7B8CiqDU1dAW3AxwJooGa+K581Zl0w52K5T5nowmz3zoqCRYSKSKQinvLQbHVGhIiJtnajQl1D\nMrx4uiGgCpiS2hQ+j0MSc8WcMWZZMCfWSGPO7jHZrTcEybcKDETw1gFRwGV19JAVNWEUmAQUxaHZ\nqrT2sWYwWAcHftCHbonrLACdbLV65mI5g8xqpc9cLPcZOy6UyljC41mem0g2KaUVRI+JyzppDqka\n6NUQHwJ3GZx4kCBd7nv96/j033yAS6/9WvRvb6TsOO4bCE9c6OFswEOxYMEJYfkQV/7+75GWHkED\nlArJgbcGpNjZ1z1nV5lFwZyL5T5iVyxKhSr0KekgD+e1MYXWQjQ/fFyF9lfwYRFCjb/5I/Dkx3Pn\n//FLpA9+irsfup9rv+IrCP3EVwv4T32S2CtRTVx+5BLY2KDxJcseDi4uEDcGUCyg5uhIPrbkwM+F\nciaZNcGci+U+YaeFcvjXxEc6BPK2mNNamKkB5wAPZXR5y/3gMfxShL/8EP/4zj/jS5/4KJeY49Eu\ncHCpx2IlXNE7TF31UU2YOBoVREpKZ6T+BuYUE8V5eGj9GAtVB2KDdIxgAgrljr7qOXvNLAnmXCz3\nAbshlA1KqUMLcdxya/2JLQnwBVCtQiOwXnHz//Ic7jnesNCBJy0uc4k5rugW9AaK9jxdb1S+Rh00\nlhCp8c5T4rO/0gLJG+oUp1AoaK2syzpHRIk2IIqjWxbMY5Szz6wI5lwsZ5yd33rneLQnZCtS3chn\naVrRT30W/KH23xp8fx02VuFDH+SvXvc6rjzU5RHBuOISz1odKWslaqReEDYWld5Sh8H6CtFFJHi8\nOryViBNqdXhxCOAsx3cKVTweCOiBI8AS3dQhBZgL5f5hFgRzLpYzzK4IpSkiASNvgcUbgyiUjeG6\nHRYMaGq48y7e/QMv4ZGdDpeWDXWZePzBg3SSQ9XRGCyKUeLR4Omb5h38Q+t0ARMPtWMoeNFBCgkM\nfJtv7k0JCl6Ehd4SxwoH9RppYTHHj+bsK6ZdMOdiOaPsjkWZSFJgKIKSu4wWEMA5geoB+OuP896f\n+nG+crnHdZd0kZV1iAUFy3SiJ6YK60Z6FMSYqL3gzBHUUaS8pW68EsWBOMQgicObUibHsLNpcjml\nPToIztE0DYupgINX4C2CbArtnP3DNAvmXCxnkF2JelsWHxPwKJJaMRKlW21w47/5QfqfuZMndXpc\nd+lhmqaBtYg0HZpeSXKKtwYrGiwZiYS6bP+pKEkc2kbJ10qALJ7DkkZgJJTOHGY50o1TBloTipJq\nbQNWHoDlJUwc0Jk3B96HTKtgzsVyxti1qHd7I0iTKxPFQdPn5p//CQ597GYe6Wu63pFSgwLee/Ae\nLcFcxAHeHAMnBPMkO9nqC5oFswoQUhblMjmSnFrjraKI5eh7av2lVd1HFpah48H1SOQt+nZzQOfM\nFtMomHOxnCF2Y+sdDQpxgIJroAb6fVhb5bZ//jwWO0t0UAocSbIAqiqqSggBsSx+3tyoFFHs9Ntj\nbzpSZ2+KPexnQfFtfXkpiVAEym45Vv3DmNJf0JswZ0qZNsGci+WMsONCaWASCRJITcRrH+64ndt+\n9Ic5uLiIa9ZYZAGvJd6nUecfAOcczm0KopgbJRQV8ex+RAcj4X04vDpYLKnrmqSKkwKkzfmELJIu\nMiDRmW/J9yXTJJhzsZwBdsdHCZoUbw4f+/
zxdz2Pr1xf57AVLGCsNYLgcSS22z9yu03Ttvs4Z6Ab\nA5rYp+iU2EMncmJnaNu2aQBzFH4/GZeK4fbR67lwpkUw5+HEKWd3mmIo5hp8fwDHv8wd138918bI\nI73DgiN6oyg9RaBtarH3OHM0Pm/VQ1lkazb47MeMEYdvG1y6nBO6jxCaeb/OLUxDt6L9tcr2Gbvh\no8x/MSFNnw//9E/y2H/8KMtlD4nQNwddpV8NKHRymYzOHCo5uCNo/rkR1hYCB53iXPtKguwjk1JH\nGQmIawWTh6/Hv8iYtIU5PxMXEYZDtI8cf5B7fvpnecLtn8CHRNCanjmci5AKgk1QKNvvyWnb1k1Z\nKjrEuiYeXwPpIBrxVPtIKCGhmFSjGRpGQRbQSR/ZdDFJC3NuWU4hu7IgBhWy8WVuefELOZqE5cIz\n6IBYB7EuyUe8LuBtM41oLzGB6JWQ2tlkoogprkloalheOsiXOyWsP0g6dHR/VfAYeAvZSSsRLCDk\nIJiTsfOxjy4OF8KkLMy5ZTll7IZQJoAP/wV3vvQlXBmEplSqQlBJOQndaHMWdWI+SjFww4ygViBM\nwIkQNVHXNRprkN5ZBlTMDgaoRnLaFvm7pNE/OhwNDSY6F8otTMLCnFuWU8TON+6NoMYdP/x9HLz3\ncyw6paKG0CXnMTpCs4iTirbj2UTx6rJmmGPoMtUAqTEgcTABRaCr/VZcepM61B1BTfESxt77DhaP\nEwKgnXwBEahxCH3KGX+9O81eW5hzy3JK2HGhbBrY2OBDz3smh++9B9c0CBHxeYvXSYpPHZBqqobL\nDo/Dq8uiiSMFRx0jG5LAKnAFkc7MuvMMSKY5MUtgAKxqLpxac4dotEalDwZBfduzs0dNNY+Sb2Ev\nLcy5WO5HBn1YOcHNL/4WnuiV1ERSoYgZ3hy9xhF9mtiW+2yoKK51DTjL9eniEpdLCfUw/d0hEc4+\nZ3f6UBgJZU3+esF/uo3rf/bvMQdrbpkkQi1NjownpQA8HdIo73U6z90k2CvBnG/DJ8yOn+j+Ou/4\nJ9fy1Zf1uNQCSQvK0kjUiHYxhI2yoowdVGBaP3RZMB0ntOZIt6AJXaqNPp1uJ1ucorl+3cEGfRZm\nZIs6blGuAr/2prt4760VvcNX4VLge1/zRb782Vv4/pc+h3/3nIoGxXmHb90mSEFEc7MTYG7vZPZi\nSz5/pyfIjgvl+gq883/w9UcPItHhQoeOE9RqREsMiKGiExml5UwzzkDw1HWNiFAUBYw+EDnoYdLQ\nYTZm8GwVyl9981285x+yUIoKMRkbZZfDj/sG3vqX9/MvfvmzJAPTRNSESc6TDepIOOwcqqsuBnbb\nwpyL5YQ41xN71kevH2f19a/lvje/kSIVeEo6TaJhgIlhosRQU0SHiZuJj5gKLHqBGPHasOYacImU\najyuDR4XU1/Bs9VHWZOF8n2frugdPApJMQ1IESkIqHk6i0vcV13Gn96cMPEE5xEaUnvmPA6haH2Y\ns3A294bdFMy5WE6A8zmhwqZgqm75cKw8yG2v/DdsfOTDNP0B1UafMjYkVwOGmEdFR00uZuOjlcsd\nVRSKDnWTiGUHmoQP2UrOjTQmfJhnwWij3mNCWQHvvbWie+goooJpgNAw9MVCu60sSt7w+5/iV97+\neSoc1ngCFeZAURIgBOY+zJPZLcGc8qU2Z8j46XfObS6IB+/nEz/0z1n6zG3EusJjBAEfWo+WCc4U\nr9NtfY3jzGGieWAZirfsr+u2PjvSBtgA88xE/uFWobz+VbfSO3gUUcPUIQWY5Y7y4343M6Nz5Am8\n/xM1L/mFD1EXAVKBtOJLK5jYXDC3shuCORfLPWbbJ9FOc9MMS7kEThBSSnz4pS/m6PH7KZwnYBAb\n/Kh9mkPFITYbA2OH/S1VlORygrwYdKSg21lgo66hexBcSZDu1OtkIluV40L5/FffSufwNbgkkEqk\nUMwsR/1bhoI5Es7uFfSLx/Dsn/wgq64dJMeYYApjgjlnyE4L5lws9wgzO7eTJ6My4c27RPIHyAzu\n+Qx/8y3P4AmxnxtNpISYI/hNYbS2YW+agWCOSa4Ht7HO6WLQDCqqqkJV2XDA8WOk8Z6WU0qy3LwY\ncawCN7zpLp7/6mxR+thgGtBQIyonCeWQrZFd8wdYPPIU/tkNx1gl56CaRjyu7Q9KK5hzxtlJwZzu\nFbdP2JETNrQqAT53Nx/8gZfy1aXH8JgZ3hWbHzBzWHtq/XRr5Ihhx/Vhl3UxhzpH0e2QMJpYsdzp\nQVngUwPEyR7wGThTMOe9t1YsHjiKU8PMoaFuR/xu9yPoMF8S1fP8n7uTFQFxAbM0srDTFKeCTZKd\nEsyznikR+R0RuV9EPjV23xEReZ+I3N5+P9zeLyLy6yJyh4h8UkSu3ZGjnGEu9EQpYFiul3YO7v8C\nf/Pyl/FkrzQpojhw4aTnsdaSnERDjJ3AmctpQwbROXxZ5PehXsuF7F6Y1mz00wVz3vfpHMxBBUsl\nLug5CuUQh7hAZ+ESXvCf7mUF2sFtw7+kbbR8Lphb2QnB3M7Z+j3g+VvuexXwfjN7HPD+9meA64HH\ntV8vB37zgo/wIseR/ZMOgRPH+NiP/hCPTxXJDBNp64rzQjBhtI090xycWUBF8erw2o7KdcogVRyS\nLqgBgX4jWS+n5IIwdJmcLpizcPAoLglmDlcoZhcSxneIKwmdQ3zHL97PKjk/U4jthtzNBfMMXKhg\nnvWMmdlfAQ9uuftFwJva228CXjx2/5stcyNwSESuvKAjnGEu9OQMBxdaqwi3vOJf8cj+ClET0grl\n6LnGtmCzLJRDVJQiZcEMahxYWmA1DaA0QCmL0OblTPpIM8rpgznFkWuyC0UDLgzXxIWfHyce5xZ4\n0X/+Muu+DQjNBfOsXMhn8nzP2hVm9sX29peAK9rbR4HPjT3u3va+UxCRl4vIR0Xko+d5DFPLOQdz\nzoLbWKd+/X/m8mP3YTGNhoWNT2oRyyI5C0K53SNspMNqMqyzQL/W3C2pDe5EGF1NBvR37VjPxshH\nuSWYM0wPKmI1CubslFACIIYPDvOB77rhOCsEICBEvDkc4STBnDfg2OR8P5sXfOYsP/M5P7uZvdHM\nnmZmT7vQY5gmdqPWm3e8nQc/9C7qepCDOHryc8ySb3LUjuw088JPQhSRhoNecNUawSLBGrDc/9FD\n66NQuhOsC9/qoxyWMI623hpQH8/TR3l2ggSaZLzgNQ+wku8Bie2acJuFDOhcMMc4n8/p+Z69+4bb\n6/b7/e39nweuHnvcVe19FwU7LpRVBXffzT2/8+vQ9HPbMpWpmHR3PphAdODIKULVGTJdHBFvDUEj\ntVdUQFyJHyikbFkqrr1ET86S3hr1HgZzegePnrT13i2hzDgKV1B2i1MEE9qdhoInnDTOeM65f17P\n9wz+EfCy9vbLgHeM3f/9bVT8GcCJse36nHNl/QT3/OS/ZklSTmK2op142G6tZkwzk0CRcoOMJA4T\nK
NzJTTBMYL0eULsOK2VBlEAjHR6qahrncp9O8gwey9GvPSe3Ij41mPP8V99K99CmRemKuLNb7zPi\nwAKh9Fz/n+5jBUjJ4+lTbzq+R4ON52xyLoK5ndShtwAfBp4gIveKyL8CbgCeKyK3A89pfwZ4F3AX\ncAfw28CPnduhzy670eX8ln/2nSzXG6BKiAvErqKuodDNJT8rW3CH0m27HTW+HUSWHKV6iripeBsx\nsrR4CBqj1MRCqghas6Q1TV2DKyDm1JuJXiu2BHOuf9WtlIevGeVR5mDOXhavO5yUlN2C5/7s5+m7\n/O6U9IlDwQQ8cVoSCGYOmYZ5vCKz8pE/lV15/7ThI8/5Zq6RNVIyvCxQl4kiJbxBlM7UNu7dShuK\nIUmg9g5vNSlWlFIQpIBeiU+RwaCicJ4meBYbz4mNFcrlDifW+/QOXMKd/cASyrXvfCssXXGWZ90d\ntjbFGLZZe9+ns49yuPXeTR/l2cnNR4QBb//pyzngGG3JrbUsA007PXImSut3HRH52HZiJ3OxvAB2\na7jYAz/xA3T+/hYSFViH6AV1SpEUo2zzEHf8qXccE1CrcdJt8yUjDmWQahaqAt8tacQQgwfrhpgC\nn9E+lx99FE/9jufRfcGLoFOA68J6Dcs9WDw0sdez1Ud5w5vafpTjwZxzrszZebQtSlj5zI188P9+\nHkGhcCAWSRJIQEmF0QHmgjkXy11mV943A97/Vm766Vdy9aVX0lFPkhzgyPXDDpWYAz0zQApGN3mS\nU5I6xJQYI0VxENYjdx827usP+KpnfhuP+vGXwZGvgFBQS6DEQSo43tQsd0t8E/MnfgIidKY2a8OE\n893Io7xQFOi6NdyxT/D//fLzKCHbkq1g5lSnLJhzsZyL5a6y4++bAWt380cveiHPKBdJjWA+4kxx\nFvDq2v6Ubib8lIrRmNIFajzeHFIE7tuo8E97Ok9+7augdMACdDo5f1IdKh7XToe1DqwDXZQwyag3\nucpyazBn3KLMwZzparCpAP3P84yr1vmPL/9aOtD2KYrQvqNCA1Zc1ObldsVy3qbkPNgVodTjPPAn\n7+bJsSB5IWok+EipgDrM1Xk9T7lQ5vHXRpKEBWEQB4TlS7m7CDz9x17J5a6Ab3oO5vLICBqHVjWu\nk31oTsirUiIVSpeAn9Br2WyNpyRxJ1mU3cPX4GKzQyWMu4MDtHeUv7rjDt7ztxXfea0nz9kNOBRt\n631EYu5YdBEL5naYW5bnwE6/V0bu2Vib52Pf8wIe+eUHwBlLKeBNqUND7ZUi9lo/1HSTvFAKWOFZ\nWa34QqE8623vz1bjcg+Sh2GvTVHiWA2SYQiGQxn2DA8TvJafKZgz7B6Uu0CVU+GjPBsKqHZ52885\nLokVCcGFMvuRJTffKNu2bxejYG7XspzeMzxl7PxFRVEaNAXKqka+cCdeEkupJCj0i0gVwKdeO4Vx\nMmx3gZjk0bW1Oe5bjVzz5j/gWW//M+zAEVg6gFFQu0S0ZtS8NtDgifikBN0cvuVxhAn2Zjydj/LX\n3nTXyQnnMyKU0A6rcA3/4Tfu5QE6+OBQq0HyfHYP1KSTeonOOZXpPstTwm5Y30rExw5OgfU1HtdZ\noKCHIqx0a6ID0YJJjgsYXxwPlwBvAhoCn2kqPnLoMp78nhvh6kfD0iVtaSMklGQRP0pCDxhQ4cDn\nVmyGQ3AT23bDmYM5W0sYXTHJ9KDtM+xT4Eh8YXWJm+6GvgacOCL5wuVToGh7W805M/Nt+DbY6fco\nkgixlQRr+MCzn8BTO4+gcUL0FWXKbvhpaYphkictiggu2ZZ/U1bKDvcde5Bn/f5/h8c8iSgOTwUU\nY9tsJTd6OJkaKDWXQQJMeqyOQbs9PTmYM771du0oiFkQynHy+XuI/+fnj3IpYNQUhIt2+z1kvg3f\nIXbeTwlhkIVyECIfeelzeWLZwxlEX9GLjqA6NWt33KJMKY16ZwIgyl1Nw50Lh3nW+2+Cx2ahDEQk\ndhB1oNkXJqcRSoAS2tpl8EzudQ9LGE/XZq176JrNxr17VsJ44ZwymsIM9Yf53p+7lQ2g0JLa0kUt\nlOfC3LI8A7vxvpi1fSjNUKn5xCt+lEffehtB26YSKnirwUluFDEhTLJ1NS6Uw+MWy9Ml11bv44uH\nr+AZ77gRNGLd9nGQm134YQBre1ZLnHR60JZgzq+1CeeLB9oO51OScH6+DM+fmeVZTqu38ZbXfS2X\nTfrApoC5ZXkB7N4FxAOKiOHX15AbP85SrVQugRrmBqifvFCOf0fb2T+aPYoiwoZzXPl9/5pnvPNG\nCJA6WezyrwTM0/ZSZNtWy6SE8mwzc0YljDMslLC5pr33pJSQxUfxlr+c8EHNGLN55neR3an1ZrNZ\nLQ30+9z9y7/IkUXPg91ELZEYqpx3OGE/5Uk2vhrOOYJzeBFQ5ZgpV/9fv4X86E+RPJgkvECBw9qg\njTIchzHdy+tMwZz33rplZs6MBHPOhpmRUgLn6Xrj3X91L1+a9EHNELN99meAGsBpG7iossdPj/GZ\nWz/KwaLEI1lmNExFQGfcohQRLKU2Ri3c0ySe8N/fDkcfjfmAjyDtxSVhY0KrMyGUcPqZOeNTGKc1\n4fx8EBG8FygaHmyWqTpX8eG/0ymdkzl9zP4K2EF2I5hTWgWa8qRCPEIgvudvufbzRq+t8RaVqRJK\ns9x0UqzdehvcXlVc96fvhQOXU3cPUMYGXCS5YV8habufZ0tt2jnTzJzO4Ws2o95hNqLe20URonpc\nbFiUASElfu3PHRsAaTDpw5t65gEedimYAyQigdBW4gKWIEb+/lueyXJaY7Fcwgo/FbXeJwmlGd57\nRLO1uPhTP0vn2c+lKZdxARwNkH2rOSUoEazTiuX0C8uZgjnT1j1oJ8kD4Dy1c5goTtZYTB0q9ax/\n+Wb+4FefyVco+IuwAHoe4NkmuyWUYm3QQocF+EpqBpx43//gqktKDh84jAvTJZSq8RShvLVWOt/6\nfOgsU3jINpgiY6WIoW31Ne3L6UzBnGHUe7j13m9COSS5hCchgNJjLTREi4TLnsIHb0o0YVqnsU8H\n+2s1nCO7JZQwFKCEuWxhNk2DN+Mjr/sVBqsb1HE6JqKMW5TOOZxzI6H8hzrxrD/7ELFbghq1rGAk\nRLM4GprLEpWZyNU7XTBnawmjC7ovhXLY4k8MhIRTB9pDvRGD8Tt/3bDG8FU3Ez3WaWV/rYhJYydr\nhlGgVhObmoICjq3wpE5BkM4Z/8Reky1bzcEcVZzlEbu3VYlnvucvoOtzSaYTSgqCLoBBjW42upjy\nVXSmmTnDfpTT3GZtp3EmeHU48vjeYMsE89AMeOFP3cI6kK9+1WQPdArZv6viLOyar3aLYHoJlMUC\nOMc7X3I9C2438zjPDzGHqeIRvDjuqo1nvPsDsFgScblaWxh11kbaypvpehkPz2mCOcWRa7Y07t3f\nQgmQnI12E0XKe5sydTEc3YNHeOM7HiDRyfWeU7H3mR7298
o4DcMqhh37e2SrxSCrZLsQRzfNIQk4\nfi9fs9Qltc89TYJpZjgJ4AIfP3IN173jPXCgQySg6ojOSE4RlOS0TQV1J73eaWTooxQDxLEK/NKb\n7+L6V+XGvUWsRsGc/RT13g4q0C+UGCqQBrFA0znIWz6ZiIBREmujZrauibvJxbM62COB2vIUVmf/\nz00v/nYWmjgqN5sWRuVvIjQ+8K1v+GU4lIcQmBV4l6coehTD4cd6UE47p2uzNkw4H0W9JzpcbHI4\nczm3lxpzFY5ILyrLnQN8ZpBbj4ZSKOfW5YiLZoXsllBmIRk+CSdZWmaGlAX8+dtZqGtO+B5mMlWC\nORRKE8elL/if4ZGPAtfFCDjJC8Qbo/Zps2JmbI16DytzTko4D1yUQjmOWCCJICRiXaF14p47wBQS\niYRDmZnTvqtcvKtkhxlJn419E4GmT7O6wZULB+nFBkmaSwenCGeOO8oFih/5IUDpw6hbuajmfPOh\nUE7XoZ+W0wVzcvegsRLGfZZwfr44cxglq1FwriT4gl966xdQA598K5WRRCRe5FbmRbFS9tQ/KIzq\no/PScnzx13+NhhrfEdTXCLFN4J483uCzdeR/+oWfge4BjGxtFZBnkxuMlsmUC6W1X1uDOde/6lbK\nw9fsyxLGnSAo+NChKTzEhOt2+Z7X/jWIQ9Th1bV5D+6iLo3c16tlp4M5Z2VMTLzlL/7xZi4rEwsd\nB0kR16OWLma9iQtmbRV9b3ztO94K134jpNAOsXLt1L/su5oFhk0xThfMWRgP5vjZ6Ue5l5RiOKDy\nfVy1RL+4ln/5htupHSSnkBzBLu4Jh/t2xUw22qx5nkk8zl/+wL/geCroxy5qHdAO0UdE1tuywcng\nzFF0So6+8Htg4TLwvVEPylyg6Umz46I848yc8a33xRrM2S4OCK7EFvu4IvLAsbtzoroGkh/rS9rm\n5l5s7MtVM0mhbGhIaE7pdYsIXYIFnBlOGlJYpxPzoCi1ySSnO3OIeG5eX4Xv+0EG3UWSgEmD4EgE\nQHPi8pQvkdOVMJ4yM6fdes+F8uw4C6QI6jqsFY/Jxa2uwZHf65qqFc2L7328+F7xbqI1QZXQeIro\nYKA85XAA10fdgCI5yuTw5kgyubdeLeI7iaue8zysWMwRbyqUPEzMj/oITX+akHL6YM6ohHGUcD7f\nem8XT0G9sU63vJy3/SVUqWjLJKGkOOvv71f23eqZnFWp5FybDgTBO4XP3kISpfIlWNkWYitpQr5K\nE8V3HaINdz4w4Emvej3S61LaZnXOpmvAMc3LY1gMcLpgTvfQNVtKGOdCea5IUSI43vzWj9J4MIs0\nTeJifh/3zSvf82DOSU8OCYdRIMSsiXGFd77qZ2i0wDcBR+6GPqmREdHVqDfqDUjW5Smv+gmyyQWI\nIhGMhpw1Gpj2paGWa5vHgzmJNiPUAAAgAElEQVTPf3UO5vjYjAVz5lHv88HM0EI4cNUTeO5PfpKY\nEkXhUb34fJVD9sUqmmgwZ5Sv0qBEKrLPh6bgwD0PsBRKlD5ObWIjI0yAZBRV7jl58/H76Tz3W8EL\nKQDqsBBJzMYoiNO1WTupe9AM9aMsbGHSh3BahsUKSR2aKooi7zzmYjnDTLzGWsCctvmJjk6E0K+h\nOs6TLnes+RorBJNJ5uAovbSADxGh4qqnPw07cAm0M3OGKUwl050ndLaZOeNb71kQStggpj6EzayI\nia9nNkfomhmdULF06aNZ0SyU3k/3GtlNpn01zQSCI+DzNjZA7NW877nPZrFRvAY6cdLTGgP9TkNv\nEU4UXZ78q79NWhVqwLXJQWJh6q1K2MbMnBnpHpQMzBVYUVHVq5xIaSSUkxbM4fMnMwYxURQlv/un\n94+szYuV6V5RZ2HSi+pkHA2OPhA2VrFHHKUflvDNZLYtw2avQR3B+nS0olnp8/gffw0UBzBf4kQR\nDGR7s70nSeJhZubMYgmj9iD2KHSZVC9z5YEOBdNjYRbqiHUNolRW0MTDrNUyO4m3u8AMrKpTmWgw\n52EoKegBfPZODq4cR7UG9n7he1MQJfmK6JQoPTbSIl8sjsAzr4OuUiwYgen3UULroxwL5tzwprtG\n6UGjYE6oZ8aiTAJSHKcsTjBY/wLv/sWDvPnfFdQPfI6En7j1VoeKOiQ6xQFgmWZQ8db338wD7WFF\nLs7xE9O9sk7DNIrkKTzysTymLCf29LVX+gHKZgGvAa+QUp/LnvPNsLwA6mEGfHpnCuZs7R40K8Ec\nWCG4dcQ8pQmrJ1Z423+5miXgsNR8zWONYA/QSDM5wTTBq8cZqF/HXEW3Jyxe+VUUIQv9RlqnuQhN\nzLOuLhG5WkQ+ICKfFpFbROSV7f1HROR9InJ7+/1we7+IyK+LyB0i8kkRuXanDnYmhDJFPvBdL2Ct\nLEaRwxD2tqK2SD2CKkiFugFRV4mlJ5UL4BYZ7rmn/d0808ycrd2DZkIorUFYAO1RKKwfH/DO//JY\nFof/TIc3/NhT0QfupKhrZFKjaSXPXxJrXTlWIAm0ibz0lTeBQhmNMO1+m11gOyssAv/BzJ4MPAN4\nhYg8GXgV8H4zexzw/vZngOuBx7VfLwd+c8ePepqpNlhYPEJZDXBeSSmR0t5uWpJrKDRvxas60ikO\nUfbhkh95BdHJWDf36ewhM8zGetiZObPUPciypZgIJAfrqyv88Q2P4BBQEnMHekAivO1/fzYRR2OB\nOk5ms+tGKW4O9R4NDt9t0E7iyw665bCyS6f/iruDnHWVmdkXzezj7e1V4FbgKPAi4E3tw94EvLi9\n/SLgzZa5ETgkIlde6IFOv1UZ81zw1OfxVYPUNWXwFBNItZC2rVoKPXyxSNMkiu9+CXjNi1xocz4d\niTh16105fTDn1Jk5sxDMUWiFElGqlRWefHVkoW1EIcllHzOgBSwA3/TEDbxTfOEn1K4ir40yBVx0\n7cWpgy1cxkAhykK7ZhJIw8XSVOOc9oci8mjg64CbgCvM7IvtP30JuKK9fRT43Niv3dve98Wx+xCR\nl5Mtz4dl+kVyeHE1xAK4JQZxA+t16SejY5HkQXRva2oVB7FBxNGzFZZ+8EegWCChNDi6rWA6yY8e\nJcxPcHd1ch5lDuYMR0H0Dh3Fjc3MEZuNrTciKAEvxsbKCn/yuq/gAHlMRwTEO3yC4NdBSkgFP//d\nV/O//VHkb25bxSL53Mjerh8TqH3EAynUuI3AwtJRbr8TLnsMlEI+JtG2ei17wffz5nzbq01EloA/\nBH7CzFbG/82yop2TqpnZG83saWb2tId5zLn8yYmQ2i+xvL295d+/HC8RcYbTEnQBMcHZZDoBelOq\nsABrEYhYawUMN3ijZhmiEzcQtvoof/XNd53cPWiWZua0W+9NoTxxklBCtlQM2rkkPRIFOOgoPPuJ\ngfWH7su/j0fi5D4LogWUjo044Bd/5x8xD0HqUYG+t8jFMAtyWytORAqyUP6/Zva29u77htvr9vv9\n7f2fB64e+/Wr2
vvOiVkQyrxWIsEAq1h/6Avc+ZlPk7qOhbVIoYoJJHHoBBv9akyw4EEcirWjx4av\nQdvu4m6y+mOn+ii3ljDOzMyc1qIcbr1PFUplWA6QBXPoEoEoiopy7RPgcr9Kocv0zUi+JE6oCsyr\nIyRP2Vki9ZZ5AKjFMAfmXfsYprz+68LZTjRcgP8K3Gpmvzr2T38EvKy9/TLgHWP3f38bFX8GcGJs\nu75/aCOGpTnQBhwsLi/xDd92PZcWCwRx9LVP9BVeHZO67jZJ+TtKWLgEolDg6SCtX7P1laETHWvb\np4ZWtLfOzJm57kFjFuXQR3kmoRy+3cNgiaNtyOIcyxjqwYuxbEZhESGRXL2Hr0UQk3zBDyArfdxq\nzb33QEGHitQKPFN/WnaC7bzEZwH/EvhWEflE+/XtwA3Ac0XkduA57c8A7wLuAu4Afhv4sXM9qKm3\nKodOh9FqF7BFSAt84c//gtWB4MMCqVcwqVVkoogzDgTPgabKe7syV3/74dvb+ivdBIcFaIQOBY2u\nQAP3n4DvePUnT5qZI97IWVjT/ok8NZjzlKNyGqE8XZ9QR6DBqKipiQPhD1/z9awXD1IFRV1DJ/o2\nrWcPLrwmiLmRb00B6ZaUl17Go6+E9bU1uppHTfiLZNjEWV+lmX2IM9sc33aaxxvwivM5mKkXSfK2\ntRFtAyN5ix0kIAncyv100gqJwP0llBpaC25vKX1JXyoK9WgDnauupsZToggK4lrfpSITW+hKIuF9\nASo4fwDzkUe4gNJnEJWDVqC+GZVuTtMI4VM4SzAnE84yILOgq6AC1gWfYGn1GHXvESSB5BPRlXiL\nhOSJfncWV5Z0h7mmdR8p4rpYE7G64Z/+yF9w4++9gCT7P6gzztRcqmdBKLG8kErINdWjODg4D1Sr\nrPcLuvUiS7FLSHv/9jpzrK/32agT640Sy5Kve9Mb8+IfC+4k3ASFEjDNkVaJozEFiQAO3v/apxNW\n7iD5U32UU7lOthHMsS1b7zPicg9pjX3UwfFjX8JrpEiBQoVeXdB4JTmGXVJ2BR0TShM3yhe2tEbv\nyJWckP3vo9zKVIjlddddN+lDOAccEPAmo2hmGIbED3S5ZjFQyTqN33sf5fCz05XEgmvo+cjt9z0A\nd97T1oHnAx0WO04KA5IEhlIYx4Jf4qBH5MbXPp547B/Q09RKT5VgbjOYA9uzwEZBw1CQtObt/+c3\nMZDAACFJaEWyzOGg3dq2SMSZ4oZjkM1hBkUh9JaWkXRis1P9RcRUiOXMMLyajs3SDrQpN9Lwxd/4\nr6ylGuc9lbRX/z0i58UBoljXoGs0KFdfdgk87qntMRcMZXKSW6dhPCC2UeBAQyK/lwXaNmr3/Mmv\nXEs8divKqa3BpkIwzyOY87B/LofacOR2eYUriUB9/834MMCVCYgoQrOLM5xymlsudXRaIOZxEnCh\niyW47BFPYqVufd9TcBr2irlYngcmtH4/YRh3WHWejZX7OLR4EAj42tHbI/vNtb7IoDntJIlDk6df\nNfz1iTXYWKdSMKZnhoowvPCMbbFxiDUghgVY1MQ73zCtgqmjEsZzD+aciSp/M0AdSJ8eEPylpHqZ\n1PTxrNJJtgdn0SGtr3hY/qgx0o/Qr4V/uJNcn3kRdU6fjk/OjCHQ+i8DtH62Do67/+5O0uoGC76k\n9AUpWR5Qv4sM8zgByuQIyRGdUGwIV8oyV3/1V0EoKJ0hTC46fzo2Sy87OBoSDSYdsgVc4J2yqIkP\nvv6pxGO30tcwHYJpucQvtT7KoUX5W6+4cksw5xyEUvPvgKLDNC7zLAKXLR/j8HJNsILE8m66Kk/C\nRBESIhUiTU4U8l02KHn7++4iEjDvyOUO+5/p+eTMGpLfvLLNUYxpnYe+dBvLBwMnWGWjrEguLyFn\nYaw5wc6Ru8IE+gE2ghvdJxpY7Cyz0u3Rv/c2KJneKDKAgFhB9qoOx/BmvFOiCB98/VPxD31q8hbm\nKVHvCwjmAKNmFAKMj5kVQBID4Lde80/YaAKN+N3zU54OMUwUEyUpeARvyoFykXs/+/mR/Qzj7qn9\ny1wsLwCxPBExbgxY8CWXLx+gHvQxS7lmts1j1F3UqdA2xnVA2Xrca1H6WtFdXIBQgO/t3gHsFAKh\nLQmNI8Fs55iPCeZEt+RbfJQXGswZ/VmpRjmvRmqzFiLg6ACNgWx8nsl8XF2+MFjEW8TZgCCRGIcd\nUbVNH9r/2/G5WJ4vRs5B1g6h14VY0ElLNLWjU3nKQQfDUSpA3JVyR22v+mWCblTEIPqIBcUL9FZW\n+Jaf/o+Aw5rBKHVoatmGYE7Mh7kl6n2hwZxNXPsaN4VWhwWp5ilizaKA9L+E1PWupgud8QjN4aXE\ntEClC6HEhWWoNvI2fbiupnjzshPMxfJ8sRx9NqdsiFKHhmbBSMsHMLdASIY0CXa5gYZCG9RRBkW2\nYr0ZiYqHJPHHv/CLgDJwDf026jzVjAnm5l2bgjmZoI+eEvW+8GDOODmQIgaeAocjkV0sLpT0gLXu\n5URLI//0XmMC6oX1ZKz2NygOXUblSyodBg33v5Ts/1e40xhEsj/SE4GGHkrZVCw3hqw/hLmafpGg\n8O1WfHfx6nAWSJIjl0UUtBC0PcYUwPU79Ojglen3LQkEArH9L981oaCPNW1z3jMHc+xcgzlnQmgF\nMyE0NC7XzAM89YoHWVooEaAT99Z1Oc5CIXQ7StcrLgRK193z5taTYi6W58BmSXWBJ+R8OOvkrVlY\nYCUmknicc/QQ9jIvfTgKQMyBdNgYVCT6fPsbXodH6SyWuaZYmH6xbDl1S57Zs6DP2Nb7TMGc8/FR\nPiyiCB6PJxAQzX//UY99KinWmCgxVCSnE+lk5QDUMUjLOEBTmu7g4Q4yF8vzJU9tAKCqFTSyvt4n\naUEymdiVHxQq43DnECEV/On/+mpqCmqBJAmTiDmdfv8lTDbos41gzrlFvbdLrpghudyHSKFs4A//\n5G/zYQFJ5Byvd7r95hvb8IkageNfujmX/Y695zHu7wSiuVieA8Nl4Wgwpzk1SJRumdM/HvWMZ7K0\nABISUpR77ot35ICPeGEwGHCos8Dzb7iBUhtKy6kfm23BZuTUTyLos81gTnt4O4+AeSVRQcid9n/m\nB78RizlRXLTTTmDczjlsm6dYcRYhVNT1R7cfDp+Myx/xVAaAc5vHsNeD+faaGfnETBECSkHC0eCo\nSXgsF4h/5h9wVaRDl9DAhu3dlTZ3zHQ0DsxXFAsFa15517//CbCCRoa7bzc7QjlkT4M+Osqj3J1g\nzvYQHAUFkHDW8Bv/7SacbeRGyEOfyzYxSe02KCe9n+YBbTdNUErUVZwxzTxUbJBIvQM4cv/RccHc\nz1wcr3KH8TiCQjc6PB6joCbwxfU1HixL1mIkNYmF6Cn3oBxs2Kkrulwl58xhA2V1dZVrv+slsPYg\nOeUbZvaUt4IZxz7EQ8EUJzskmFkMtwrlL4+COXsjlEMEB0lofMG//eGnU3
Q9Sbc/IlclohIBJbkK\ndVX78xgm6CjCvrlWo5VgJ5frRhKqIM5TVRVNBdCQZsGlswPM6CdnCnCAz43FJEFZe1ylLMc1FtyA\nVCa0NPpheEXfnbfaETHR0XIVdfRcQKtERxa455abx9KXZnybNCaY41tyRzolSn7OgtmWMOppot6H\nz7eEcSfwOQK+sQpVKih9uY1fapPadRl0EaUA67Rb8TJ/1w65ZFNAGoQKSd3cZyCFnAFgHnXrIDWV\nQBl7dKuSkoogkYUAUdsS2mZX34WpYC6WF4KQpwy6iJUJDhWsrK8AjuiV6BVnea7MbrQ+MBlWB+nI\nHSUoNImy7BI7gaVj90Fa2/Hnnhi7EPSxsRLGvQ3mnJlhOo6q4mPkd9/yaRKdbfymgkREO7nNGsNy\nW9qfYxZHqcASSD+3YjOXB5NJn7b1LyJGHnXbXu5djfgBYosETWgDwUGigRCJWu3SuzEdzMXyAqhH\njv6AWIFbPEyvvAJrHJoCnaYczVeRXWg1IJbzKsXcyIVlArUl8NANjqY2cJ2ZSRfaFjsY9LFttllr\nn3bP8O28eeccGyHw9V/dY6HsML5VNrMt4t8Ku/qcgiR9cCsYFUhqxywlRFZR6yNiiOSiRXGKhgiS\nAEX8Bs4KxBYBEL9B0/sUG4ufoundy/HFt0I34jAKPCQIbjtiPrvM+L5skuTeiybQ4KiA5lCPUFVY\nY3gHcWODsrPbw8rc2P8zqQCtB5AkV4JUa2CXgOyjSovhllxO9mEaTbslh3e+4Vq+46c+TrjkSTjZ\nKpLZcjInmAUcyuDECk+5au+DOQ9LO9L9trvuY73zCGqt6bju6LVsXgg032fd/JP0cW6AaRe1HkiT\ne1O6QGIN8QVqXXDraOxgvg+yRlPeCdLgtUd15C245lGI/xxp8eP41COhbCzdTRkvI/FyChrMCiTs\nk3X1MMzF8rxxbU2sUjYOLZQnPP5xPHjXn3Bl7xLWm0RoKrSzuKtHoTDqgBQUom/vK4XghMsHBouH\nSS6gKVL4fbSoxwQztEt5KJjjQZ/TC6aiYtmfK5tC+foJBXNOh5FbRvZKWOORpJQoJbXNpk/OcUwW\nEFGcxPbSXKD0MErwG9C7HapHE90A8RskrZGwinbvJLn7cdLD/JfQ8kuIJvq9uyAdwdtHEemjGE3n\nASRcBvVhVh/8LD414DfnUe135mJ5jhhtFobLdYM1HidKF+Wqb76e+/76Q2ykRBUi5dLuCuU4w81i\nJynJCwPnKWLDclHwF//0er7xbz5M4QCLQMBkn/Q92GJhBkIb9GnHEyt88PVP5Rt/5pNjgpm33sk8\nJcbGiRXe9Ut56z1sCmEj8Z0Mla0j0qG0kLulm6cjHnMFmBKD4G0FrMCkS1q4CZ+O0LBG6t6MT5di\nnc8Te59E+o9HF27GJOKsi7lVcOuIHiYiIHX7mvPrdmkJs0WwHk14ENEit2pLB7G0RjCPpRLzfuT+\nuBiYi+W5YCACOCXmlGUM8KHN2XnMo0gCBQ3Luvdv7XAkgUWPd9BX4WCEg02kocJLnsST56coYR9u\nySNxJJjDLXlUNxJMf9lX4nBEdXhTNlZXtwilTlwoQdu58xUlgXfcCnRWSc7h3AoaPkdz6CbS4Gqk\nWCEW9+T7XY35Yxg9Ik2+qpvDureiUiF0SH4D0V47ZW9jM/DYpgk5A8J9iFRYqMkX1tRa6yVmdR47\nGav9s362yVwszwVhNEI2tB8lISfmdgC6y9x+rOKxZcCVUEzg05bHXBjeO5KDjS6c0IJAIOHx6hBp\nT/y+MC1btiGY73rDtTzvZ/6K8vDX4yRRr66dIpSTCOacRBuZdq5H10Hd6fO1T3qQhz74myx2ElI+\nQHIpq1rvk9kXKes0lIgtIDjUctAR/2VMS0QUQTB/HMyjvs3iMMmpRABSt2MktE01y5VCWSwVsw2c\n60MKeNflx7/zBzB09gocLoCL55XuEEKu2bW2jZYjf7BiGzt55MElukWg0j5r9M/y13YLRes+XVOq\ntQd5YmgI9Wr2xbnczg10d+NOk+BhK32E5T788eu/CTtxN2n1BE8+yilCmSYZzMkHDE5pyN2OPAXf\n/UvfzYFL7qQpPseGreVUJ2tAKnJuaIlqF5U+ySXMVaiLmC0ibOZlinZyipmFdo5UA67KQjl+DFbg\nUg+sRMwj5giyiMRDeFmgrpQ/+MC7qaa/4d9ZOZcmIHOxPA88DlGXK3mAkkhICkuH+EK/TxECh9IS\ni+XCRI5PDIIraNYrylSy4hMMaDNL8ik3aYeg2/7KKjpTpY/isJ6ynCJ/+Non8djeLdzwiitPEkpr\nL36TwMgFhoaSUByCj45VGo6UV2HJUThPp1zM6T7jdeHmEdncThshR4asOPVx2sv30xZLaA+xcPLj\nyD1SndRtTma+6DvpE+UhzBtaK37Yf3BGF9C5dkuai+X54vLiHrTmmQ4exErjWTe8hsVuwvcMqfPA\nsiZMphtL0etSdHtc1ilB17MVAXhzNBQwoePadU6q9GmyX5mGhj7may4r4Hd/9ls5SC4FNMLIopyM\nValoK9q5SjuPciDA+459ggcONmzEmsY1mG72uBxHqNp8W4e0iean6zQkpjj6CJZ/p30PTn9UtKLb\nyy3jLNKYEJaW+cMf+3WcD1kn25LKWRFNkVNzb7fDXCwvgAAkAn0CWi4gqcu973wvg0Fk3ZToAp1Y\nIhpIMhn3sOKItfKuZ30b6Aa1a1DLo7FqILWlkgnaSPk+QYaR8YxZQUmHLgttcCPkmvo2ELc3Q4tP\ng2V3iMMRzEGTZ4Z7IPX7XH6wx+JiRdeBN7+t/hmixSmW4qmP8bkE8my+GHO5h6aCeqXrPPrZVdZa\nkc3H07aVmwGxvJDem3OxvEAWAU2GL3KNbO9rvg6jxCdrW7j1CVS5zGwCOBQ1z9KRI7BS06Q+SRVR\nKMiuhBETEvTdxFuBoG3P45DFqXU9DITJp1C1hQIj75/kHYtXsF7ilW/8eSx0UVdSbMvHPD5z8SzY\nNi4RokQfqbVPKpSy06XfiXg0uw7aUt56cmb5njEXyx1g0QtKTZPgkhe+EImObujkaSomuXJiAldd\nh1KKISUcHqzAzbew6BdwIVsB0m6dhtMh9yWjBp6tBd1+qIVIoGEqmny71rIXRaWiocFcwwZG0y1o\nnMvbahTdw6CKQ/FmiDo6votVDRYT1YkNSkrU2vp1Wst8Gt7Lh+FCO7rv28/IXuNZpPCAKDf1clOB\nUBrRhVGvwL0kiaN2JUXoIrVxyRWXw2ANYvbhDT+gUWIrHvuZHOX2rT8wNzYJhG01ptgbOgBJcd4j\nasSUCJQs9RYoNEfiGhfzkLw96Q+n/397bx5nyVXX/b+/55yqureXmc4smSQTQthkk7AFCKsQRFYB\nZRFEZRVBfJRFWR4f/MHz+/n44E9FHkUgLIqPCAiIIIqsAQV5gISwrwFCJiFk1p7uvktVnXO+zx+n\nbncnmcz0JD19b8/U+/WadN26l
XtP3Tr1qXPOd2sK36Ukw1YhIycLhj9+wksQDdgIrvRYxriMsUbW\no/RFK5briQW0Q2/Yp685oRfRMQxd1NR4V2G1ol8v4WwJ5SK9P34d9OZRquVj3SniamtJ01uJyad6\nInN72pQCzoUcsZYv1d9GdYALHrTGS8qfWnizXCP+RFBbQxSojKW2jiiGkCk9KuaHCwylppBk9KGY\n7P5zU405R2LCestJwPRWtp97S2odYow5QmaYm8paL5VBVMjrzrKfnIYUmHmAw3zosb9AFgLgsaeI\nUI6wkNaRJ3gYJBhsFumxxMve9Gq8BDBCEIcag43J4BJMxNv1V8zRqDVrPlo0YjTSGw5wnWnM1cK9\nb3k+meYw4XkG1ruQ2mSf7WYkRu7/ujeQ5Z3li3Vjafe7cUAR1+i4rseOwVVJOS4ZOVZrhpcOte1g\ne4auKdhZexgupFRcm8B6uZ4kK/OEPyA0JXD+i/98J6WDmGX4CEjyA41CckezYOP6z1pEYbaqsQSQ\ngG3EsmMcUjpmdm1nF7MQToFsv9ejFcv1RgzYLl88uAheb3RkWcsMA9nGQjZH3x1FCCVipE7RFnKM\nDtpk+zZqsAwQKYnUhFATpnO8Vpyd5/DRf4XYS+J688625QSw3/S4/MorGWw3uBhxYgmN07mLKeGz\nGZU9PgHURihtTGu7EumEmPJr5oFH3uPClEXdZVBPbkTDiSjP24rlOqL4tDCWTWN6C8zmM6s8/VYo\nXSRKwJqD5MyTa3kjlfpGURkZik2W9SO4hSTnk4hKIEiFagANOF8h1DjxuGiwmQMb+ac//h9QGkQj\nVeMC0jJ+ksGtZD+H+bpcwbao5CnYH6tJKNNIzxzTj/JmtUMMaEYw4GLqXSoRKss1V1yJVZsc4ZxA\nGOVLmBxOVB3zVizXhdjEijRoh52Peyg/mV6iFwfXsV4adRQhMhV6uHqazGdkXlOaf3PdKXlletTS\nI5iSYBSvUIkSt04vf6bXksp7SjVEV6BuipDP4q0QOkpd1PRMSb/ex+LCIa6uvs3jn3SQa//ulaCG\nQhcQX59cDumbjBTqqJSDQ8wz4Pfe9geIVAxMZLC8atBYp+VIj8v1R9RQ1CmssjKGIJbeFYd5+eOf\nR0cyKl+i3uOdZ0CgGpMf8XXavI7GnCNxzAUcEekA/07ybnDAe1X1/xGRWwHvArYDlwK/qqqViBTA\n3wL3BA4Av6SqV5yg9o+fJkuMMwGPUGZQBLjfK/6cn/zCwyHrgrWoT6O9gICpqTWnNnkTe2uJCMF4\nRA0WTwgB5zKsdmGoOJPh8GhH6A8OMeu3U5oeeddS+YCv+4gv6Nn96FSJ2dZjx62n2b3ldNgxB2fu\ng1hym6IDC9cyF94O8iqot6/0go0sMtMC0KTLC1iEqW6HT+/9AlfMDbDRkMVIEVL9nGAjUTZ+3SRK\nqgZgI0wXjtO1gxUoXIHUEaepdLCLjjHGi55QkRyxltXuErhQVZdEJAM+IyIfBl4MvFZV3yUibwSe\nDbyh+XtIVW8rIk8BXgP80glq/0SgBgJJzBwRjIE8cNmWLdxvsMSgD3QqNBhUwGiWyo/GIV1niSEt\n3Ds/C+LIxeKjpfaHmJrZRWDA0HrKeJiDg/346SG9HT9h7+F93O8ZdwNX0/v6kOkH9WFpB8xGYvUd\nTNYH/2MqGSLkuMLTr2qmt4AJFuSHqNlOZaCmZhp7SqXcGjfJJwFyhBhrhibwwg+8ji1zGUrAaFoz\n7LsUzy9jme0aQiV4Im9/4euokJU8lsawXI/8FOg2xxRLTdaJUXnArPmnwIXALzf73w68iiSWj2u2\nAd4L/KWIiK6P/8zk0TzQAmBxiEKIAVN0eORTf4n+31xEZUpwSjWELFOsBmoxqBpUHMEZYkw5Cg/G\n77N1ajdXT32Ps89d5IofHuKOjy/QawI7z4qcedfpFNRd7+a2piAMv41H6dwXqAJk+4lVhZhA8Kkk\nqigQhkQUIWM49KA96g0ATWoAACAASURBVC/+GVP3+p9YdpI3Z4B229HlBjDKMlRoBA2ogadd9CJO\nm+1Qab/xyhEqG3HRjCUCTAWsy7EswTBwGlsoNG9CNAED1RgzNY3YiFElrDH5r6QScJcCtwVeD3wf\nmFddXui6CtjdbO8G9gCoqheRw6Sp+v51bPdEIUBOCqGjdhjbxDQ86rHw+rfSyWcZ0MfFir5bolfO\nY2QL5pyS/jVDvFR0zuhzp/NvzxnbtsCuPezmljB9JXPze5HpnNm79uj3LCYI4j1GDmJ9hukoSKCS\nHBMjNhqCSVMnI3qd0UgMgvEGZ3LAcXjxg0zVz8Nl25uzSNEttp2On3BqVoQyGMHj+YZeyw6XQ1Rs\nMNSQkjWPKVTWEzm8cIDpTo4sVswi5LGD2mb1SSK2SfoxLjZKKGGNYqmqAbibiMwB7wfucHO/WESe\nCzwX4Jxzzrm5Hzd2RhEhIY/42pORI1NzTL2ooL7iR8zesw+D7WDmISvB70OzgMaAsYGynMbHaxE3\nTzSWuj6A9TVF11L6ComGzOZo7VFrKH2k6NT4GozJkiuJKNEqgsHEIz/vrauAGsSwRXL2f/z57Hjo\newj5nTCk0LWRryanWCbsjUBZTteLSno49aj43Q/8KTNbhIqAsTnep4fdOFwVDOmBWRnHbLeDyzJe\n9WsvZ4efo3YpFjwNDsbn37+RIjniuO4EVZ0HLgbuC8yJLKepORu4utm+GrgFQPP+VpKh5/qfdZGq\nnq+q5+/cufMmNn/SMCnVVpZj7IAq9rjaLJLd9zD9+lrmuZxhcZB+rBiSitkjOdWgi8YMBSqFEBVH\njvVF+kwDappsMupAMzJnCcFgTIFE1yRaAGI4pv+dkhFNpKoXmMu+Dd/8E2zcQ3p2xkb4R7keW9aL\nUUkSS0gO5hgqhKtZ4qv9r1BYj9GI9RYZx3ASkoCTUxmDbcpMlLbmgVvugnfJZcgQmiQs43mQjkMo\nYQ1iKSI7mxElItIFHgZ8iySaT2wOezrwgWb7g81rmvc/edKuVx6B9LSNSdDMFrIZT724SKfMKMgx\nQXBDjzUFYVASy4CRPsYcwrhDIIoQQHyKyW0W0I1aDHUyOKpLoYwUiK54chpVxKztp5agYAtckRMO\nfRDiZYjuXxZH4SSqMT4RpAS/QmB5yYNInyHPeMtzMV3BByFoTpQxueFIWuO2VFhqXPTksWb4/QNY\nOviQekSGJKE8xZZq1nI3nAlcLCJfBb4IfExVPwS8DHixiFxOWpN8a3P8W4Htzf4XAy9f/2ZPOqlg\nlEQ4/SFvhqKLNyU2DlFvcTqNrTuoNagJGFGMChoNJnZT8lYg2kEKS8Q3T3FpxiLJaCParJPiMXgi\n9rie9pnAoBTqsMDhi18Asg9pMoujhhUX+MlyOt58RJTYFCHJlqtHQs3P/ukT0C0GohCkgxlXzjiJ\nK1k1JUWCBaMccrA0KMkxqAGnfqxCOa5RJazNGv5V4O5H2P8D4N5H2D8EnrQurdvMpMEDME
c/3I2t\n8h9QOAbSxwRP3ZR4gOR2FPEg2appbzO6WJWgtUkwhpGSiG2OVYymyd1xN1Ej1kAMhq69lh//22M5\n6xFfRwhNUtxk6VRMU6umHWneNNLsQBSQmoDgVHn0m5+Bnl4QNBBLcMak5MRjIpDq9wxMjpGKchhw\nLufi3//fiBo6Mt417HEKJbS9fx05QoZqicBZbP2Z17KPMxkYR2agPkK2GCMuZfRukvGKGkzIbrD+\nGLFJKEWXH+43RSiXm6gRayISPNvd9zn4n4+iLtP3anKzZ3ntUpnYWOCJRAFNhXkFQDy+qdf+ngOf\nY99pAzQHLeVGk61sSDMFohi0aeeUr8j6Q6YLBwcHnM2O5YfnOCTjREfmrJVWLNeF1UK58jeEQAp8\nKvBxG3m9gJY3njQjCeUaRhZqkNhU8VtLaYC1YBxODIuL30YO/L/JlhQcStaMKgM6WktrBXMNpPU/\nL655lHkCiiPjB/T5q49chKuVIuS4Ij/6R51QkkXeRHAacTFS+h5xpsBIznue/1aUfGzLk5MgkiNa\nsVw3bvhTGmOIMQK3Z9d5v0XJ3DE/JTblD9bCkar33Rwiht32J/zoq++C+LeMVklZTgdSJ8FssxUd\ngwgxUolrVgE9imLI+DE9/tvfvZKeLBGNNL4345t6e5NCFm3zL1qPbJmhP6iIlw/YRpcOx04PeCrQ\niuW6YI6wbRBJ0yuNBnPmY1iomjDHCUbFckt7OT+8+C+Bf17JKqNZI5yp3njEt4J5RJJQDo1r/BFH\nrlgZBznM//7cO/hecSW1idgJKBDnNGKaGkxDowxjpDcs2cIWfv7eD2YbM8B4Ho6TNKqEVizXkSOt\n56TXEoBqJ2fc9Q8IUl3/f5w41AjnZN/k2s+9BcwnQRUVj1IQyEDAqmPZCNWygkZKk0SwE1ORsYCj\nosf7Lvsg7776EylJioxz6r0KTf120VlqY7DWscVO0T9wmN9+yK83ceARIVnIN4pJE0poxfKEsmwP\nyWoqW6FnPJzDi6dj/BRrDJ4aE5EYK04rP8b+j/0qwmcBj8QSF0uqJhONNE7srWvRyOE8rVEWQEdj\nCiSIDqXHT/2PR/BXV/wztq4nYkS5mgh0gtKtDVOVgYHyWw97Jt0ypuehGkaxOid6hDkpxpwj0Yrl\nCSSVLjUEMnKTUXI2Zz749djiMDFIE1Y4uQieLW4fP/7MmxC+QTAAhjw2ZWWBsJygeLLP5USiyyVq\nR+XfIkhEomFg+tz2/3s4W+94BnlZg5ksoVyNGKVygUNXH+Rpd/g5OlkX9dpk1E8VMlOdyRNzrSdV\nJEe0Ynmi0Ag6cvQBLxl56MKWu7FUd7G5go0bU9b0ZiAK26q/58qP/xk2fJFgUt1cC82t45takWm6\ndioSiVjNmmFXChNFHYdNj49e9Vm23fkMssqjMsGV0khXL1SR97/kdbjKgzjKTKia5YRRRv6Rk/2p\nRiuWJwpJ6dfQGmLEKajzEG9D2bk7MYCTDpvhEhix7DLv5MqP/BG2/DcqFdBRomJSrLOkJLan2k2k\npFyTKCuuVTiulSXe/72P8srP/iVxWI4vMud4iB47jJzrzqbItzMUiBYKs+IOl/L7mnV3Tp/0USVs\nhjt1kxOkSM7pCpYUJrb97i+hLzsZVgNUJ79KnqjBqOOszsf58cdfTX7w9VRCCotTh0UaH9GMeIqN\nOgQPEvAmCYni2MMSf37xm3jD199JYS2ZKcaWdGLNSCSzOU+49y8yyxZG8WVTMVn3r5u179QTSmjF\n8oSyHPEohtAkSlXpwfT9mTvnaWC2EEJBCAETCyb9cqgqpxdfYnDZKzj0mSekdTo5QGCYRpbim0w1\ncDJPyVcCmUY5eDKcGlDlMvbwpLf9Gh/b+x9U5XDyfwaJqESGknFwT4/fPu/JKIaOQgdAAsE0lSR1\nfSN4JtmYcyQm++48CRjlubQxrfMpU3jdBrf9Zfb6MzBWsWaKYMujf9CEoGJx0mem93EOfPg+QA+j\niwgQcHhZPQKZdKU4flLNHFbFyhs8EKTmv1/6Nn7j3S+hnPKogDNu4keUkQjdDgw8n37J39LFpmz/\nkh4FlVhg/evrbCaRHDHZV/JkQsDGiMQaawT87dl2+iMYuhwkoBLx1h+7NvgkIBku67Mz/xaHPnwh\nEr8MXEUeAAza/DvZ8mEqKcO5CxFiOscFPEN6PPB//TL/+I2PUpsKpymv6aQLpbFKNaxwXnj+BU9m\njgyJLoU/kh7uuaYyvC2tWG4cAohpKjmWeM3Ycpc/JA6248UySkPpJdsUmSQFy5Cajt3D0qefCz9+\nG8jVOJaIJEf8Ckcd9dhldkfDtTWw3rk8lhOErIGAkiuAoTSwQM0ifR78hifT2z7EFoEsuuQeNOFC\nqUKKLDOGcs8ST/iph1CQEUyKZV/OPXwCKjZuxlElTP49eXIhgFiMdgiuBp1h28M+AKVBAReSD543\nYeJvtmQJzzCmxulBet/9E/b+ywNBv4PhSjBg8eRBUnb3G5mSByJePJhja5beyPbNYdQqjz+qYcoH\nRZrFBm9rCuBDe/+dp7z7Wcg2Q2EMYiYkKucYBI1YMWiosXaWh976nszRIdBtDgAvca3Pr+Niswol\nTHYYyUmLiKWIlsrVKGdQn/Vsugf/glpHbjiRYEusnwGpiRPsvG7EYaNSx4qtsz+k/uQvIKZkQXex\n7cL3oNnpLPnTmDWjW2/VQ0DB+pCyEIsn0jw0NuIZHiJIquYKEddUx0lLkSvfP5p65zYlWa4l8KSL\nXsTl/lq2bjfUWpLbglKmsJsh/NNYrEZCPcRmM7zwwmfz+O33oWCGilS61VuPxazrgHIzi+SIyR6+\nnMwI5CFDmGHmri/k2vKnsJSIRiTalBDYpjo9ZsINJcF4nFqih2E2T4h9CvkGhy9+OEv0WXB9+k3N\nlhusyHoFL1AnI9hyBMwRho5yI9vLrHW4GdPvqY1QjlbovIdgzHVGVAHIKbmGRb60+D3u9KZfYU93\ngaldGV40xX2HJJTrnQVqvfFqcNEjMWJMRu2Vn99+L6brmeUwf6HGrbMf5ckglNCOLMdH41eUhy6V\nPQc57U4c6v2IraGHMRDEoVRgBgBNNvTJfbZFW9MlYxA8hR9QdcCZq/iZtzyZ+5z9OJ73iGdxpkRm\ncGQIISilEbSbUSDJaX/Zkt7Enq9eN7vh5jJrrtyraXopJhkvKkwq5SrNkqlLdXFy78EmA80+06fi\nMI///5+HnjvLzI4umR8Sag82RyRDZPKFMophJniCUUyWMawM8ZoeW5gCILimdhTQjqGOTCuW42Qk\nmNScfteXc9Xnvk/RuYyiZ8ldpLQ5aJV8F6We6HVMFRiYQK0eax157QjTc5i53XxtcClPeeOl5AuB\nqWGfj/7Bu+jYAld7yLpYApWsmLW0qUVtxadkHavU8Ab62QxMBXCrjlFGa4wrx1ahJnM2vdYkkqkA\nXGPhdoEaxbuKRXr885WX8pZ/egcLOwfkt8ipp
MKFAESiddgAIn7ik3tGNNUnF8+SVaa0Q9bzfPTF\nf4+qIC6l3EtisL65K0+WUSW0j5DxI81/Oren03ksZdhN7FSoWjKt8Cb5L0btNrneJhMV6NaRLZXB\nBcEaQ9cvwNWX0yuvpZg7jLuFx97+NB746ifzgfnPcTgL/IRDREpyAhmeXKFoFgprrwxCfYP4+drX\nK05JEsmIWA0QKkYmG+9TZnJYsaBnLqMcNgW3FJxC9AOg4ppwNYsMmafmNZf8DT/7J8/kzZ94O8Nd\nQ4x41IwS5EIQkwrMbQIhWG2m6efCdJ2TlcLfPuevGqu3pW7CNGWdx04nk1ACyCRUqT3//PP1kksu\nGXczxkwTUub/jQMf/Q262VVY38FlljIbEAVyb5enp2MqK30D1AQk2iYSRNE4RbQDYh056AqedODx\n5AH6bkBRK4PMUXvIHJTBk0vBwrf285uPfio/3jfPHz7iN1EqIhnTTDEYDOh2C1BHlICMijQEj7Op\nblHdPPOdKiJCrTFFhyAMyyHThQXNoBEFpaZHydXs49XvejNnbJ/jkz/6Is52yLowzxLYnDyzFHXA\nBaW0hk5tUIkM3KhO0th+9jWRFhWy1LOkxBo4tCRc8ox3sFU74JWQZU1G/PVjE4rkpap6/rEOasVy\nglCSvcN++1n0r/0Hcu0hdRdjoHIlNqZ5uzaWynGvk9WuRtTQrbpARbCBxaxLEXt0K/j4tp/mxd+/\nNWIc0yH1szpVfcUCLgRKa5MVvBqiOkUcVuyspjhr126e8ZincV5xaywwwDOrMMMcpQRyQBBsdCt3\nelTqUDLMA4acDMs880zjiBgGeP5935f5wmWf51Pf+E98ETHbZ8mykkotoooNnvlYggjTJiNTIUr6\nHqOGfpbq1Ey8UEpJ1G5TiKwkw7MUHB9/6ls4M8yCcWiTtf8UF0pYo1i2a5YTxGjdTe7wFg5f9RV2\n5l9C3QAfLLnP8NZjNGA14sWtayc/XrwNiBryeorKGHJqosnxZUWeFdTALaJnxuYMVCmtNAJj0DpQ\nNBlsbFQG1mGMhTAgbCsYes93+TEv/sf/iQwDtlKG8z22Ts3wkFteQDUTOXf3GZyz/Vxi4ZjqJP/A\nvIyU6rnqwH6++Z1v8rGv/QeuVxGdQadyirlp+lKTZzB1uy0IHtNPxpraKVPe4AJstTmxaW8USQ8x\nk0b+m0UowRDFgVRkGjFYXn/hy9kWpwg2xxCJjb9ly9poxXLCGC1h7r7w05T/PoezgSgBHyIudIim\nQgg4VYLIDUrlbhQuWLIAlQ30OktUEbp1RbfYQjZYIBPDu35k2FsathZVWuPDEMRgHVQq5DFSN87r\nmAwIZCIQawbVEp28wM3lqCpbdu2kHg742ODzDA5VTP9YsJ0cYwy+7gMwNT1Hb2mARodaT74ro2O6\nDKsBJjOo9umimACD+R5iDFiHkpPXgsS0HhklCaJRIUrjgK9pCj7ZNm+aNmZEOgQTKUJ6KJWHlHvs\nOg9LF0NECVhpjTnHQ/tYmUgMmIzF2RelgmERxKaOaEOOYpus5Yw1eXCwEExMwhlBYobxgawAnxke\nsHsLc/kiru6TxYAQmrQT4A0cLqC2cbmiZTSWYQwsSGBLd5o8CLGqqUPNEgtULlmiux1H5QJ9P6Sv\nJcHUeFtzqJxnIEO860NTg72MFdFGxAo2QscnASR39AjL7lhWlCBQOQsimOaH1Sb4dDMIZWzyTho1\nRKkoQoXVQOGnOIddjfdkQAmY1up93LQjy0lFCnac/3LgEdQfewpFOEiZK+JLjJjlqWD6M54s5RHI\nA3TrPEXgaBcrgYGBYWcnX73WY0OBzS0978ilBxJxIcPGZIlNJ7HyzM68wUlOXwNdHJVJ52ariMaV\nyHBLhohF6wKrefMxGY7yuiUu1GApCLVDDdjoUzgphmnXXXb7iZCcWUWRmH5fb8Abg4uROOFT72gi\nLqbf0Rsw6qjrJfx8xu8/53d5QOenmSFjJUXG+nAqiOSIdmQ50WwH7kDngtfgzTa0EqBA40p2gySa\n4xvzRInU1rPQCajtAZCpYaqMfOgnWzFFRgiBTMCELmgqSAFpanukZQRRsNFQm0iQFcvzKP/hSh7E\nCKakdp7aeURKbuy3EDzBxGZ0SiPWNzzORJo1vphGzZtgjVJlJJSRykYk5hiFpcGAx134SO7XuQNb\nWf98qaeSUMKEiOWll1467iZMMLth9mfJbv988mIaVQFjl0djy9NwSUWyxkXhhWhSXHtVRhaKjPkC\nSioE6IYSTImJjn4W8fbo01rRJFpFOHp0jGjEaI3ReEzvgCx6sgguHn2tN4ihMuCC2RRCOYp68sZg\nYg5EnKm58DYP4L/c6Vc5Tacn3nF+MzARYgmn3lPq+DgHbvFU+sMhxllijGCk6f9xVZVIHVscuVHI\nvKUMkU4mDIPgbZe8Dsvp5yLJqpzFtY2F67VaajVbW3STGmqxxzw2wtgMZ8dDM+vGKFQWUJcMU6Zi\nfmmJP7zwZZym3eZBur7ffSrerxPVI07FC7B27ojZej+Cj4goGuJ1+n9azbNNdMnGXlZDckgPNhDz\naYI9jT3xNDquYGAdi87QzyOqXVw0mEm3lGwCYpMExKriDdjQSUsVtmIwLPnUM9/Hdqaa2UY7/V4P\nJkos4dS9EGth+oKP0H3Y51nkPAwRo1ynamAwEZVAMB6jjo24vIaIEKhspLIwFRbxcZ5v/GQvtlpE\nQs1UiBTqmlIMrVLeXKIBFwNZVIJkmGiIsUdgCFdHPvGr72H7KDflKVwzZ72ZOLGEVjCPzk+z46F/\nzaF4HkTFqCHXTvIFHOmQBKIpmxHmibvEigPN8cxQeEO3JkXUuC38YHgatXbINcOKJcQmX2TLzUKF\nxg0LvNimumSkkprqB0v86++8k+10sOs8727vyQkVS2gvztE5j50/93YOxDuDGrz32MZB3cTGLUQi\n0QxO3AhTQkryYSrULoFEggFb5BSDAT9/zhQmmwGN1BOeXm6zkIx56YETyFbcx4yge0o+9Yp/YRsF\nwLrX9W6ZYLGEVjCPznmc/vC/YxCGaOaJscZIjaoisUjGH4Foe0Sz/hm8jUQcA5ScoNPUkrEYZihD\nJExZXvm1g0jX423ryrseGKkRInlUAk1SEIlEgSqWfOr3PsQOspTarl2jPCFMtFhCe6GOznmU3Quo\ngxBtk53C+GQVDY27iBqMKmrWN71bjBnEbrIaS0WhNR1TEbXi6spy5+1dtKzo6JCMwbp+96mGJZVJ\nVs2pJVter46qlGHIJ578PnZSrCrPu360998KEy+W0F6wo7HtgZ9j6mFfxm95MlU8DY9DXQ8rig3T\nKBlRLDZ0UwLhdfTFFDyWitwrxkMRIt1QUnR38IP5HG8sfZliyc4S1S5nuWlZO4aAly6eaQCi8cTo\nOTxc4Gy7g4uf8o/sWF5qWb9R/KluzDkSm6b3thfuaNyZqXv+dzq7nkaU04kBKvp408PFSKqhKJgw\ng9H1
dS0axXQPzBSVyQmS8WNmWJw9m25IIyAlo1AhD+3K5XEhkSBZMzvwqCkxGlg4VHHBmffmLU/8\nC3aSr/u90d5rR2bNfVdErIhcJiIfal7fSkQ+LyKXi8i7RSRv9hfN68ub989dr8a2F/Fo3A6500uY\nucvvge+k2GlrCY0TuM+WCDaVphB1jeFnfRCNOKkRGWBtzVVyGtnMVtR4ZmKfqTiPmAWCLVlzgfBT\nHJVREg8QqVBTIkSyoeN5D382f/pz/5WdjErvto+gjeB4fuXfAb616vVrgNeq6m2BQ8Czm/3PBg41\n+1/bHLdutIJ5NM6FHU/iyupeZFYQDD4EQhRcsHhbJsEyPdZ9bUvBM8VAHIfn+/QqS20zBqZLjLMQ\npog6TWUssZ2OHxVtptOiab1SJZXWKPod6gPKM27ziGWrd7tGuXGs6ZcWkbOBRwNvaV4LcCHw3uaQ\ntwOPb7Yf17ymef+hss5XoL2gR2M3P/Xzr2L/zLPwoaY7M0WQiA05me9SWYMK2MpSZjcoTHuTiZLT\nNSVBIt/cu484XKDWDqXNmzroNUiJo8RMeCXEcRKbTEwmeiwpwCAYYL/lrHIHF7/o7zlN86a8RiuU\nG8la52J/DrwUmG1ebwfmVXXkk3IVsLvZ3g3sAVBVLyKHm+P3r/5AEXku8Nyb2nARYRJKYkwmF7Lj\nnjvg2jux+PWX0nEdiDXWT1EA/WLAbBnJgiWasOKbeROpQyTLI149dbaVPfUUZtaiBPIATitq26y9\n3czvOplRYyFGHH65OF0QQ/+HNf/lfk/mOfd7KgWmifNuhXKjOaZYishjgL2qeqmIPHi9vlhVLwIu\nar7jJqleK5hHQc6DM+7I7K5fZ/6SZ9Jd+ihIDxMinQqqTh8Xunjjk5UcOf5Su5KqKvqsQLXEaY5j\nhr7PKLTChT4RQXGYOJ6cm5sFbwzdUKMGPCCSIxT82xNez2nMYHA4UvLj9bZ6t6yNtdwd9wceKyJX\nAO8iTb9fB8yJyOiqnQ1c3WxfDdwCoHl/K3BgHdt8HdqLfTQykJyt93onV/Vux1B3IU6ahBvpCFFD\nFIuKHrdbUcQRdBo0w0YLQagwXDuzg6roMpCcoZmmNu1o8mgEKxA9IXjSY8tBz/JbD3kOu5gCH7F4\nNFUBGndzT1mOKZaq+gpVPVtVzwWeAnxSVZ8GXAw8sTns6cAHmu0PNq9p3v+knuDhXyuYRyNVgrzN\noz9Ads5TOVidRU1GjJHS+ZXEutFyPEkPDYEilmQMMNInmBopAm8fzLEvBJZU2dq1uMjxj1hPIYIE\nrEJXQbtdTJ1TXdnnXc94A7+44z5ARm4LJDqkLQUxVm5OL34Z8GIRuZy0JvnWZv9bge3N/hcDL795\nTVwb7YU/BrKLqTu8ktMf/ncsZfdnMZxOFE/pPMEEDJEsupRc+BioRKIIQWwqZWtSvsQqKA+d6VOY\nDlPDnGHfYdVgTjF3obVa+0OsoKrJY2ToLEt7h0zPC//xu+9mNzNM0wVMeoaZ1pdy3ExE3fCbumZ5\nJCbhfCYWBWIE+32ov8zej76Q2al9eBuIGpmtLZ4MxCJUR/6I691jQRxqBrjaQT3NJVtvzX+9+o4M\nPQzzPk6rNLLUYuKzjq8HKivlKgzxxhOISMRHTwyCdKfRfs3Tz38sL77DL1JiKegs9+XW6fyEs6a6\n4SedWEIrmGtCATkA8XIOfvC3cFv3kLFAHkvUCIMq0Mmun1XcrMrKnuKQRQ1iAqhhMdvOL32zy4Ez\nHwSVJ9NrKPV08pAR3EEAJBacjJimmqQ3BqtpOxhg2fl/ZT1YjNJHKELGbcJ2XvO0l7KLObrMEBDw\nAedSwVrTFhfbCNYkliflanFrJT82QTzWW3B3ZNvj383C599Mf/79WHsNMS7Q6WQEX6/KYJOs2aIs\nC6Y0AqDRApG6a7hlN/K90nNWXKQ2ClpQhAFKZGi6qE4j0hvLOZ8oDMkroLQWE0d1e9JvE2Mktw7f\n+JaGakDHdeEnFR940UVsx5LjUKapgQyIFoJGrJyUt+em5aRdeW+fokfH4AhuC0EVr6ez5V4vZfvD\n/4Gl6jyGYTc+AjYHRrHkzchIkjPQqAJQsqBHjBrmlmoIOacZiCGgpoNVjxpPv6jx7sarL25mrCoD\nZ6lNcvg3RKJ4QoyoDXgqkIiJiqssDz33wfyfF72D3UzTZQptkmRYQKixwroLZXs/3HxO6kdXO8K8\ncYQUeyy6FTGRoUYc57Ht4X/E4lc/zOEDn2GruwziIhpmMFlJlMjIYh6iYk3zSRKRaFiyHb7eF4ZT\ngbloGJSe2kb6WaQWRxYjXV2gEjvRyYBHcdlOj16BEpIxJ2hjhMHgoqfSimihMgarYFXwBzymUv7p\nt/+aOTKmmUbUECRdCxtJ6fWA9b4tW6FcH05qsYRWMI+Gg0Y1DWpSAbRgL2D27ucxyy8DP2Lfh5/B\nzk6AMCRK+h/EGIgexIAqEChNYH9WkE9vRX2PSMCYnGBqgjFonKFTe4Y2CdHIEDKJRh8VyEKk8FA6\nln1Sr48BlIhVMQa5nQAADVpJREFUyCNEIjZXlgYVql3iMDCtHe7SvQ0/87gH8aQzH0RGgcXiMdjm\nc0flQAJgW/egieWkF0toBfOoNPeSU4eXAY4CYhfinfHujux85Leh+ld0/wJ8+2WUOk2XATEMyGxO\nP0Cgh81BXMZ8McPs9FYWyz6lNczUA4gGLwVD69IoVGMzqkrmi2PV+94oTBNvFDVDJdLLUxG4IIbi\nesnmQ13iO+n2CZrG6UGUkoJONsfMHmGqk/POF7wRV0Wm8w74jKFLETo5LK9RYsATce2IcqI5JcQS\nWsE8JgJOu1QCmRkgkuGiAdnGMP8VOmctkJ31GPZ/9veYP3QpW9w+psolOjh6WmCM55CezrTbwqJf\nYqfvshgDRjMqk6bcTiMSV6zpjhT7HMWMP7mGREQDQSzBGKKmNplocKsMNgA+1rhORtcV9KsSUcOC\nDUgFW3uOdz/zL+jg2MZUMpA5YCBoN41Gczw0lu5RfXK3zpmYWqFcf05K16GjMQnnO8kokQE1XRq/\nSAUEekQyDBmLiFzFj774R+R7P8XcbB9nFgkUPOVLP81V23djpmpmKljqV9QzM6lkr0ayRhCjKKZx\n2IwYamOQxtJ+zPbJ2qfuRk2zznpsogmICuAIOAwRF2OTPBkqu7KWGeoBuXVYUxA7XYaHhixce5A/\n+7Xf5VFn3A8QCjqpmJxEXEjJL4JZKR2cfuvsRJT1boXy+Dl1/SyPxSSc88QSk2tlxGMwCAOIyVrb\nLNJRe8iyAcSrgGsJ//kCFvt7ePO+Gd47dQELocTZnNmhUpkAYnExogLBKMZaYgi4ZjFQpTF0rOGy\n+MbinIWji6CNefN91TE+12CcpYo1BjDRESUS1dNVEFWCiZTWQFVhXRfVgKrF76t5xgW/yKPv9TBu\nxRYMNiVdbiZsFZ4MTaGkYtKonbQMoeKIsI5elIlWKG8Sp66f5
bFop+RHwSRvIGscaBpNRdPDagHq\n0HqJLJ8Buqi5HX1ux/QDLmOOD6Ff2sPMpz9OOD2n9o5y2kHo43wfbIE3Qo2S5Y6yV1FYh4sQQo2a\nVLxVxBw16sVpBFWMRKIe2RiS8mcO00RfAnKUjEpGwVclKkqGxWpFXyrc1BTO5fT6i2ndssyYqac5\ndM0iQ1fxz7/zem7L2eTRESrI8pS1fHUJ2gyTRpEClaRnjTRWc2iFcrNxSo4sR0zCuU8uESUgmi1b\ng+2NVA8Mwx4UXaLUGAqshwW3xAKev/rE2/mXL32Yqbmc4VRO0emgdUQt9OseZJYCiMFgRMgw9MuK\noliREhVDbTNcLImYNDWWCN6Q/JdWRpl1iEhm6Po0WrUky0wtjuv7eC7VfaaLjLKucDYnc44QAqrK\nTDmNsx36hw5ztzPvzKue8EK6KAVTFDgkWCpj8AKOSK4k74AjEFgllCdi3k0rlDeTdhq+Fibh/CeX\nuFwHBpYN50ekAnJqfFO7WiKUxiM4amr2cy1XMc9r3/56fnTtFeQzU8SdQsd2qf2AkkgkEmpPx2wB\n0hTaqsHEjH3DJU7LFeNyyjQcJXcdghekEcRq4MnzDjVDnHNJSzVQDzzBFMxkLMt9UE05JAuLxJzY\nq6gW+gwP9nnS/R7NYy58DLfgTKaAggKGJUXRaSpUmuVztoALw2UH/htFITTrp21d74mjFcu1Mgm/\nwWZHAVGPF5ecrDWmkV00RAVjYRAHYAoWmOeyhe9xly235SmvfwF3O+f2fOZbXySembIUWWwyzHSE\nnq3Zejij7ho6NlAr5FM5vcUFZrKt+N6ApWLAdNHBl8KgX7Fj21bmyz7GWaa9YFzG3sWS6S2QV47B\nvj7WdNCZjGfd/mHc4+735guXXsJv3O/JKMnXsRNzgnqCtUSE2nsKly3LnLDaWHPsEg/JGzXiWqGc\nRFqxPB4m4XfY9DTTzEAa0SERYsGwjnQKAxqoxJI3a6GBIUKHipIBNQbHJ/Z/lvvvuAc1wg/2Xs4z\n/+Yl3GfHHfmu38dcdBwcDpg6e4ZDV+5n2KvYtWs77Fdi9HC7nMGeiu232MLBQZ9tupWr9nyXX3/o\nr3DHu92Ni973Ri76lT+lMIYFDGcyTQ4MAENGh0hJpKgtZGl9VRCGMdAxFrc8n141nVdz9CH3CaQV\nynWjFcvjZRJ+i81OGkGR6sgQQDNUDErAaABxgEGJRGqiZmTNWl8J2CYfeJ2cdFAinoqcLiV95hmy\nnVn2cpiMKWYpWCIVXptC6AM7cdSN8E0PFelk9FGmKkOe5/TrHi5biceuafwfA2Br0CxV2aApheMD\n5JK8yR2rxHF8IZutUK4rrVjeFCbh99jsjARz9e08ilq5/i0emlXENKU1oH5ZqK5rCklrhRFQ73FG\n0txeR59Ok2zTQB2gcFSavjMIGFVijIQQyPNRve0I1HgsgsHGkD7zeiKoREoCBdK40q+/Jft4aIVy\n3Wldh24KrVvRzUcYZdBhuVKFvZH72y7nUU9/bWO1Tp8RCUGxdpVlHMVZ0xhamu8IKzkj1QlSOHw9\nIMsywCSDigjWWowxhBCazzSgBpHmGJPaccPzMTgiEl2TPGR8tEI5PiY39csYaTvkzUdu9MUNSVIY\nRz7vjLqlRlYJpWneE5A0wgysDCYxoG5lv8uKZRFejep1xRfJqJc/+8ZvB6fjH1e0/XK8jL8HTCjt\nCHOdWOP9LcuCNYqwTCO+Ubjl6L3UYSMWCM1odOTe2By6LHlHSkxhzA0FsVhLM2Xt53IiaIVy/LQj\ny6PQdtDxobAsUCOhXLFBjyJgIqszY6bt2GRwX3vXnvSr3PbDyaAVy2PQdtSN4/qDt1Eej5FIXteo\n0qxZahJHyyj124mJkBkXbf+bHNpp+Bpop+QbS8oOubJ945Znc52Rp6yKrjkZaIVysjh5etYJpu24\nG8valwjTuuWRjDmbmba/TR7tyPI4aEeYk8l6hxCOm1YoJ5OTq5dtAG1HbjmRtP1rcmnF8ibQduiW\nE0HbryabVixvIm3HbllP2v40+bRieTNoO3jLetD2o81BK5Y3k7ajt9wc2v6zeWjFch1oO3zLTaHt\nN5uLSXEdWgK+M+5G3AR2APthU3X85TZvIjZjm2FztnszthluXrtvuZaDJkUsv7OWfHKThohcstna\n3bZ549iM7d6MbYaNaXc7DW9paWlZA61YtrS0tKyBSRHLi8bdgJvIZmx32+aNYzO2ezO2GTag3RNR\ng6elpaVl0pmUkWVLS0vLRDN2sRSRR4jId0TkchF5+bjbM0JE3iYie0Xk66v2bRORj4nI95q/pzX7\nRUT+V3MOXxWRe4ypzbcQkYtF5Jsi8g0R+Z1N0u6OiHxBRL7StPvVzf5bicjnm/a9W0TyZn/RvL68\nef/ccbS7aYsVkctE5EObqM1XiMjXROTLInJJs2/S+8iciLxXRL4tIt8SkftueJtVdWz/SHldvw/c\nmlS19CvAncbZplVtexBwD+Drq/b9MfDyZvvlwGua7UcBHyalYLwA+PyY2nwmcI9mexb4LnCnTdBu\nAWaa7Qz4fNOefwCe0ux/I/D8Zvs3gTc2208B3j3GfvJi4O+BDzWvN0ObrwB2XG/fpPeRtwPPabZz\nYG6j2zyWi7XqB7gv8JFVr18BvGKcbbpe+869nlh+Bziz2T6T5B8K8CbgqUc6bszt/wDwsM3UbmAK\n+BJwH5KTsbt+XwE+Aty32XbNcTKGtp4NfAK4EPhQc3NOdJub7z+SWE5sHwG2Aj+8/u+10W0e9zR8\nN7Bn1eurmn2Tyi5VvabZ/gmwq9meuPNopnl3J43SJr7dzXT2y8Be4GOkGce8qvojtG253c37h4Ht\nG9tiAP4ceCkrZYK2M/lthlSJ46MicqmIPLfZN8l95FbAPuCvmyWPt4jINBvc5nGL5aZF0yNrIl0J\nRGQGeB/wQlVdWP3epLZbVYOq3o00Wrs3cIcxN+moiMhjgL2qeum423ITeICq3gN4JPACEXnQ6jcn\nsI840pLYG1T17kCPNO1eZiPaPG6xvBq4xarXZzf7JpVrReRMgObv3mb/xJyHiGQkoXyHqv5js3vi\n2z1CVeeBi0lT2DkRGYXkrm7bcrub97cCBza4qfcHHisiVwDvIk3FX8dktxkAVb26+bsXeD/p4TTJ\nfeQq4CpV/Xzz+r0k8dzQNo9bLL8I3K6xIOakhe8PjrlNR+ODwNOb7aeT1gRH+3+tscJdABxeNT3Y\nMEREgLcC31LVP1v11qS3e6eIzDXbXdI667dIovnE5rDrt3t0Pk8EPtmMLDYMVX2Fqp6tqueS+u0n\nVfVpTHCbAURkWkRmR9vAzwFfZ4L7iKr+BNgjIrdvdj0U+OaGt3kcC8zXW6R9FMlq+33g98fdnlXt\neidwDVCTnmzPJq0xfQL4HvBxYFtzrACvb87ha8D5Y2rzA0hTka8CX27+PWoTtPs84LKm3V8H/qDZ\nf2vgC8DlwHuAotnf
aV5f3rx/6zH3lQezYg2f6DY37ftK8+8bo3tuE/SRuwGXNH3kn4DTNrrNbQRP\nS0tLyxoY9zS8paWlZVPQimVLS0vLGmjFsqWlpWUNtGLZ0tLSsgZasWxpaWlZA61YtrS0tKyBVixb\nWlpa1kArli0tLS1r4P8CwO8l7ZO6YfwAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "WjMdSDKlBcPh", + "colab_type": "text" + }, + "source": [ + "## Transform\n", + "This operation transforms the given image on the basis of the transform vector given by the user. " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "HTh1Qpps8Rg5", + "colab_type": "code", + "outputId": "3badc7c5-ae57-44a8-b619-14ed7196663d", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "transform = tfa.image.transform(google_img, [0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0])\n", + "_ = plt.imshow(transform)" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXm8JVdZ9/t91lpVtYcz9Dxk7ETC\nJELAEFAQAQEBFdTPCwIKyFVRXwS5rwM4vCoOiHIN4sWL4icIvAyCijIFjAYVRQaTKJCRdEIgSSed\nTk/nnD1U1VrruX9UndOnQ0iapE/vs7vX9/PZ59SuXXvvtWv41bPWegZRVRKJRCJx75hJNyCRSCSm\ngSSWiUQicQwksUwkEoljIIllIpFIHANJLBOJROIYSGKZSCQSx8CaiKWIPFNErheR3SLy2rX4jkQi\nkTiRyPH2sxQRC3wZeDpwK/CfwAtV9Zrj+kWJRCJxAlkLy/JCYLeq3qSqFfBXwHPX4HsSiUTihOHW\n4DNPB25Z9fxW4HH39oYtW7borl271qApicSpw3g8BkBVERFEBFVdeX711VdPuIXrlrtUdet9bbQW\nYnlMiMjLgZcDnHXWWVx++eWTakoiMfXEGLn++usBEBFCCKgqWZatCObDH/7wCbdy3fLVY9loLbrh\ntwFnrnp+RrvuKFT1bap6gapesHXrfYp6IpE4BlSVGCPnnHMOO3fupK5rrLXEGCfdtKlnLcTyP4Hz\nROQcEcmBFwAfXoPvSSQSdyPGyBlnnEEIAWMMO3bsoKoqsiybdNOmnuMulqrqgZ8D/gG4FviAqqbB\nkkTiBGCtZTwes3fvXvI8Z8uWLagq1tpJN23qWZMxS1W9BLhkLT47kUh8Y6qqIoRAp9NhaWmJPXv2\n0O/3GQ6Hk27a1DOxCZ5EInH8yfOcpaUlrLV476mqirIscS5d6g+UtAcTiZOIZXehuq4JIdDtdokx\nUpblpJs29SSxTCROIqy19Pt9TjvtNMbjMbfccgsiQqfTmXTTpp6USCOROInw3rNz506uueYabr/9\nds455xzG43GyLI8DybJMJE4iyrJcsSSzLGNhYYGiKEi1th44ybJMJE4inHPs3r2b4XBIr9cjhICI\nJKf040ASy0TiJGMwGLBlyxa63S5lWeK9x3s/6WZNPUksE4mTiKIoMMawfft2du/ezdatWwkhkOf5\npJs29SSxTCROIgaDAd1uFxFhx44dAKkbfpxIYplInEQURQHAl7/8Zbz33HTTTSuZhxIPjCSWicRJ\nhLUW5xznnXceS0tLnHfeeZRlmSzL40ASy0TiJEJEGI/HRyX+TUJ5fEhimUicRBhjUFWuvvpqtm3b\nxo033kiMMWUdOg4ksUwkTiLG4zGqyjnnnMOGDRvodrucf/75yXXoOJDEMpE4ifDeIyI457jtttuY\nn59nNBoxOzs76aZNPUksE4mTiJmZGUIIHDx4kPn5eVSVG264IeWzPA6k2PBE4iSiLEtOP/10VJWy\nLLHWkmVZGrM8DiSxTCROIjqdDgcOHGA4HJJlGc458jynrutJN23qSWKZSJxEeO+JMfLgBz+YPM9Z\nWFhgz549ybI8DqQxy0TiJCLGSIwRVeW6665bEclkWT5wklgmEicRqkoIga997WsYY9izZ8+K72Xi\ngZG64YnEScRyZUcRWemSxxhT1qHjQLIsE4mTCGMMVVWtWJIigrWWEMKEWzb9JMsykTgJMMbwsIc9\nbNLNOKlJlmUikUgcA0ksE4lE4hhIYplIJBLHQBLLRCKROAaSWCYSicQxkMQykUgkjoEklolEInEM\nJLFMJBKJYyCJZSKRSBwDSSwTiUTiGEhimUgkEsdAEstEIpE4BpJYJhKJxDFwn2IpIm8XkTtF5KpV\n6zaJyD+KyA3t/43tehGRPxGR3SLyRRF5zFo2PpFIJE4Ux2JZvgN45t3WvRa4TFXPAy5rnwM8Cziv\nfbwceOvxaWYikUhMlvsUS1X9FHDgbqufC7yzXX4n8IOr1r9LGz4LbBCRncersYlEIjEp7u+Y5XZV\nvb1dvgPY3i6fDtyyartb23Vfh4i8XEQuF5HL9+3bdz+bkUgkEieGBzzBo03++m+6GpKqvk1VL1DV\nC7Zu3fpAm5FIJBJryv0Vy73L3ev2/53t+tuAM1dtd0a7LpFIJKaa+yuWHwZe2i6/FPjQqvUvaWfF\nHw8cXtVdTyQSianlPguWicj7gCcDW0TkVuA3gTcAHxCRnwC+Cjy/3fwS4NnAbmAIvGwN2pxIJBIn\nnPsUS1V94Td46XvuYVsFXvFAG3Vq4tv/5m7/aUeEPeAIsvyqRzAQDRDANMPGqoKIbd4T24+RCIya\n7fCgFcgYWILBfsL+mzh45zUcvvM6Om4/6BijC/TYh5FIZmqsiYgGREFVKVFEaNpEFzV9xuUMxA4j\n6VPGnKy3hXO+9Smw4VzonA5xBijA9IAOxBw0w2vEuognYMgwGGIEK4B4wKCYlR8kCsiRfaY45Hgf\njkTibqRSuOuGexkRkeXXa6y2CihCwKAGrILiUQIqOUEjyBCxQxx188njfXDwJu68/p8JS1eRm69Q\n9A5CrMgkMq/Klp6AVzRCFAfFHDFWxKioatsMQRC6ZCCRQMDKgBiXsN0DCBlzVaDIlSiR4VV/R6CD\nyhzjUUalM2RbH8f2XY+F7Y8Gsw3nZ4BZMu+JbgB0wHTwRBqBb24EF
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "O79BrK-bC8oh", + "colab_type": "text" + }, + "source": [ + "## Random HSV in YIQ\n", + "This operation changes color scale of a given RGB image to YIQ but here delta hue and saturation values are picked randomly from the given range." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "zZBI-9XvBSuh", + "colab_type": "code", + "outputId": "acbec9e8-b217-4d8c-f6ec-d8c272fd71fa", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "delta = 0.5\n", + "lower_saturation = 0.1\n", + "upper_saturation = 0.9\n", + "lower_value = 0.2\n", + "upper_value = 0.8\n", + "rand_hsvinyiq = tfa.image.random_hsv_in_yiq(google_img, delta, lower_saturation, upper_saturation, lower_value, upper_value)\n", + "_ = plt.imshow(rand_hsvinyiq)" + ], + "execution_count": 9, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvVm0bFWZ5/v75lwrYjenP/Scg6Ag\nKKB0AgqiQiKKqNimZlVq1XAMX7LGqBx1c9zKcV/uuG9VLzfvrZcaw3uzbpk1MjFNEgWFMsUWFEQU\nFKRvpDk0Bzj92XtHxFpzfvdhzrVixT7dPmc3EXH2/EHsE+2KGRFr/dc359eJqpJIJBKJw2OGPYBE\nIpEYB5JYJhKJxAJIYplIJBILIIllIpFILIAklolEIrEAklgmEonEAlgWsRSRj4nIUyLyrIj89XK8\nRyKRSKwkstRxliJigaeB64FtwIPAl1X18SV9o0QikVhBlsOyvBx4VlWfV9Ue8C3g08vwPolEIrFi\nZMuwzdOBlxu3twFXHO4F09PTumnTpmUYSiKRSByebdu2vaWqJx7pecshlgtCRL4OfB1g48aN/OVf\n/uWwhpJIJFYxf/VXf/XiQp63HNPwV4Ctjdtb4n0DqOo3VPUyVb1senp6GYaRSCQSS8dyiOWDwDki\ncpaItIAvAXcsw/skEonEirHk03BVLUXk3wH/Aljgv6nqY0v9PolEIrGSLMuapareBdy1HNtOJBKJ\nYZAyeBKJRGIBJLFMJBKJBZDEMpFIJBZAEstEIpFYAEksE4lEYgEksUyMFam9XmJYJLFMjBUy7AEk\nVi1JLBPji5JMzcSKMbRCGonjB2UpLT4N/yugSqi3GhRRvYLEf73HO496DwpiDSIgxsSLxHFJHFz4\nV0TCvQLhTyKxMJJYJg7JkURQo2kn8VnVNVWNogTqFIw0NhheZQTUO9Q7XK/krSf/yLYXXmJupoN6\njzWGdp7jvcOIwVozYEmKCM45BEFEUPXhvSW8P2IwNkycnHcYI3R7PVShcCUYQ5bnvP2cc1i39WQm\n1q3BZAaTZahEkVXFq2JMNQE7+LehA48GgVcUI/aov/PE6JLEMnFIDiWUQUQraZRaVKUhnuo8YoWS\nEovFiOBLjy9Ltj/xDC8890d6M3O0TEZmDFlm8WXBtLVgbRBAr7VoqfM47zFiojhGa1MVrx4j4d0r\n5VJVXFlijMEaS1mWZBK22zY5XpUMy6tPP8/LzzyLd57COVSEcy+6gI1nnU7WbiNiwEQxlsHvYPC7\nkvrsUlmvieOLJJaJo0Yaf6trA9aVCGIFVYf0Sna/8gpPPvwIc/v2YUSYbk3RNsLExCTOeYwxeA+o\nQTWKjUaxNLYWR1O9kQwORhC89i1cMQbvXLBGAbxiscHCtIbSObLMoiheHRbBiqGdZSDCtsee4Y+P\nPkGvLOiWPbac9TbO++DlYCxqTbBm43JBfyzan+6TpPJ4JIll4qjwPoibRnkUKlEL/6qCd3Ps3bad\nRx94mFYh5JllQoT2xNooMAZRwENGEEhQbGaqJUtUFCwoHh/eKLymmmbPQxpmn6piTTVt13qs1ga1\ntdbU2zBiamXzEhdLFVoitPKc6Vab2e27uP+WO+iWJaecsYV3XH4R2eQEJsvqMYVPEN5P0lrocUkS\ny8RRISJ45+v1QIDC9cjJeO33T/PUI48xmeVkxjBNBrYvbCpxnVM88yeqArVQEq+HKf2gDbtY53cl\n8c2/omFciu8LnwqCwfjw2GTWZiqfYGb7Th6566fs2LOTS66+is1nb8W2crxIvX5arVuKpGCT44kk\nlokFo6qoDw4PV5T09s3w8I9/gc52aFmLyQwTCqb0SG7w6jDW4r1HNHqhNTqBULRSyEh4rHKTwKA4\nHp1MHrim2LhPtPEc31/r1EFx0/ifiOBN5V0Ka52bptfzyh+e4sXfPYZX2FfM8c6LL2TLBe8MSwcm\nCeXxRhLLxBGpZ5oKWpbMzczxqzt+wGTWYsLm+DwPzg8HedYKrm4TbvenzJXzB6i95lCF+srAuy0h\nlYucweXF6oqoQaUppTrwYhUJywDe15aj8x4xhrL0gJCJsqk1yRuPPsPTDz/GOy+5kNPfdTYmC+ut\nKUTp+CCJZaL2bocbitYOlTgJVsU7x47nXuahe37JCVNrWZtPIkbw6vvT53rOrKgnTEOV2lqEWrfm\nvfsyxpZHs/FQ21c5XGT7gZP26v7gy9H6lgImM2zKJtn+h6d55Q9PMXnSJi64+nKyifagYKpHRQa+\nl2qUyT00uiSxXMXoPDGopqMiUk+R1Tkeu+cBtj/3Iies28jmqXWUEvqFqI+vN3JQp8tqw0XrE4Wp\ndovemzv51T/dyTsvv4gTzj4Dk2UEa9VQRd7HkFA8YJJIjjRJLFcx1aFZW5aqiIme4tIzs3sPP/nn\nOzh5eh0bp6ZxvkSsYEptWJHBQ548wFXIE1hr6HV74JWpPOelh/7Ab396L+e/
/31sueBcxGZxeg4S\nrdNqhXNps6ESS0kSy1VNFYsTnSoCxcwMv/ruD5gwFquGU9dtBiS4QXyYJlZRORVJKBsIIebTGDDg\n4/rEies38+aTf+TVx56m9I4rPn8T+WQrhvBLjC+NoUvJiz6SpF9lFeMRnPOolqgv2fXMy/zmtruZ\nzlq4wuEF6oDIIJP1qlpiAYjGoHkTogEUWjZjwmb87Jbb2PbQ42hZAIoIMZ40HZKjSrIsVzGGsDbp\nuj1+8j9uY/PadbSt4Asly1pxah4tH/FxjVNiiI0f9vBHHtG4NolHLHineAQxlk3Ta3jtyed54reP\n8P5PXc+akzahDoy1aR4+oiSxXMWUnTkevvteOrv2sXF6XVirrPKgvYCR2ksuA+E3fvm818cJdbxo\ndJR5B1ZMkE7vwQsGYdOatfzhR7+kY5WrPnUDMtlGsEkvR5Bk8x8nePyAhAV7RqnsQVVHo8oErtvl\nnltux8z0mMpbMbIlTBWNhBJn/W31UxuT2bMwqoD2mLsZIgziYyLxtgnPaWFZ53Luue1OtOgQrPYQ\n0uS8rzZYbzUxHJJleRyggCFM+bRyGVR1HeqDy4JC2e1w9999m60nnMK6iWnU+UH90wO3DYeLVEws\nlOZ32PxevQmZUVPO8rvbfsZM2eEDn/8Ett2Osa5ax2WmU9XwSJblcUAzBOgAURPwzuFdyVP3/4b7\nbvk+p6zZxMzM/n6RnMPFZSeWiWB1mjoSIRT/yLxhXWuS+7/1fV599On48/SXQxLDI4nlcYMOiGaI\nCAoebDHw43+4lX0vvML6NVMUOASDeh8ydWzaDYaBUQkXH1zhNrc4Kel0OuRG2P74s+zZ9jp4V7+m\naZGmc9zKko6S44jgqw5BkN6HKjquKPn5t7/HlBNarRZFUWIQrDVILKTrvU9LkUPAi+IlFBSxxqLq\ncXiy3JJlGaA88fMH+P3d96IurDlXFqZzjTXoxIqQxPK4IDhyhGChhOQQh5vt8qP//i3WkNPK2rhu\nOc9Jkw62oSLUYgkh4sA0kued87Ssxe3ax7/8f9/CF72YYqpk1lblSIYy9NVIcvAcF4QwFE+wVOgU\n/PCb32bT9Bo2T66llWV0tMRLP587HWLDpariJJjGdDrMDcSH+FcxoWpTWZRsmFjDA9/+n0xsWMtF\nH/sIYvurmYmVIVmWY0J1MHkNoST9pod9t44qWO+455bbOGXDBjIJnQ57vR4Gk4pdjBTzLfzGamRV\nCUmDeGZZTmYsuVjcnhn2bHs1Tsul+aqVHf4qJInlmFAdVkaCBal1bFA46JwvkbLgZ7d8j01r1lN2\nS8TaeOCRPN4jhjb+O4CGjmr9W8cfUODxe37N0/c/jOt164l4SBpIP/ByksRybNCYDiIxSxtcrCWJ\ngnEld/4/f89ayeh1CvJWm6IshjvkxLFxqBNbFNCWzdj3ynYevOunqC/rx5JULi9JLMeA0IWwjjKv\no+6sCF48rujxs2/ezinrNlCUHYwJj7dtXqcpJsaHwXVI7YtnfVEmTIu9e/eGSlCaJg4rQXLwjAES\nCh/W2Tl1yV5VtNfj3n/8HhvWrAHvQpqigHqXkuOOAwShLEusGGyW0St6WLHscLNc96efRjJbP9eo\npBCwZSSJ5big1OuPEoWy7HR4+J/vZkM+Qa/skdkW6omlwaoKQUMddeIYqJIKqhuZzXAudMQ07Rbv\nufFDtNZOo7UnXZNQrgBpGj4miMS6sASrsex2ufubtyImxFgaYyl9CDgPYSlSd1RMjBdVWFGFqtKa\naOOtwGRONjkZCgzH54pKiNdMZ8Zl5YhiKSL/TUTeEJE/NO7bJCJ3i8gz8d+N8X4Rkf8iIs+KyCMi\ncslyDn51oVgMHsX1Cu75H9/llA2bKJ3DOQUbWs968Q2RTFF448ZAFni8YjJLz5d0xHHxDR+ue/k4\nV6JeYxJCOi0uNwuxLP878LF59/018GNVPQf4cbwN8HHgnHj5OvBfl2aYqxU9oOya9kp+9g/fZdOa\nacqyR2bzsE7pPJnJGgeb0i/QtjqRxn8jR8NZJ/WJLdxZehfWKY2h1WrhgVlfcNXNNyKtFhDK6Fmb\n1aX0Uvm85eeIYqmq9wA75939aeCb8fo3gZsb9/+dBn4FbBCRU5dqsKuBWty0amRVHQCC787yk2/e\nyuZ16+mUBXnexpi0knIoDhvLOGzq8Nd+SQyNU+mWtbRarVDLUoQ5HNd88ZOYzGKTHg6NY3XwnKyq\nr8XrrwMnx+unAy83nrct3vca8xCRrxOsTzZu3HiMwzh+qHpGVzUovShGwwK+wyGF475b7mRNK4Oi\nRNTgSp9cdGNKmAHo4AJlVa6t3heE8z52NfnUJIiJ+f/JfhwWizZLVPWYQrxU9RuqepmqXjY9Pb3Y\nYYw9MiCV4R5iq1Sryv3f/xcmW23aWRuHx2YmeMfdauuFE62wererUgMbN5sIMILdJ+d7vNVpaDvh\nNbbzMPSmLPnUVBRKH513o/dZVgvHKpbbq+l1/PeNeP8rwNbG87bE+xJHYl5WYj39VsfzDzyC2dfB\niMEbC0iYstXOnFWC9kWxNsiqSzNFULXfnrdKmh9F4rAEQb3He0+W5xix7M89V9z0UYjprQZzDCZJ\nYik5VrG8A/hqvP5V4PbG/V+JXvErgT2N6XricDQW/Ct8WbJ/+062P/sik62J6Oyp0h5DKOVq0soD\nfb4Nr7/WnWdDfxs/2mktdVp/ZKI9iRqDWmG/OD5w00cx1oYYyipeVlLBjGFyxBUvEbkF+DBwgohs\nA/534D8B3xaRrwEvAl+MT78LuBF4FpgF/u0yjPm4xKnHiqAejBEKdUhR8sD3f8y6iQk84LwjEwui\niMooa8GSIPOmz8ZInHFr/98KBURqsfSh6Wy1mhHSRUeNxpAKV+KtMNud5eo/vTlkZ6lHxKI+LMdU\nFadE7CE2mFhOjiiWqvrlQzx03UGeq8BfLHZQq5EgDIJ3PYxkZB5+/Z0fsmFqGsSg6rHGHMSyGEER\niGbQYOeYeeMUUO/r54kxeO8xNlRu75Ulc0WP9tQkm086kVPetoXpk07AtsIua6wN2zSmrrxTxRyi\nHu886j1zO/ewd/sO3nzlVd58401aec5ke4JMDK4ssSZsxzuPEVPVKglroqoY6U++ait+oV95HFjd\nFpfGMqUNDW+LXo8sb+FRLrz+aiY2rI9rlo3NmLAxQxLJYZJ8qSNCdTgZmyOiPHX/b8hN6B/t8aOp\niYdAsKiE5rwmqo9SFasFjapjJMOVBXm7zazrMNvrcOKJp3Dme85j+sQN2HYLMRbvFaQqk0udxamx\n3mN4TMBEC1IsJg8itXZqkulTT2LLJRfgnceXJb4s2f74s7z47HPQ6zCRt8laOb1eqERujGBNhkFw\naG3JS/0baOPvYWiEB1XCWVdSc55Cw6lPjFAYYXLjOgYlNTFKJLEcFdTjnSBGmNu5l90vvUqmFucV\nLw5rxuenUnGIGkSjxx7fdPyG6bQ
xlN4x40ta6zZy+UeuxeYZ3lhE+g6uIF6memXzXQasPiDqZl3h\nsb5TjAEBkxmMbUE75/RLz+f0S89HnWfHMy/y5O8fBe9oWUMrb1EUBWpMUDcN7R9CamH/cxyJwfAg\nasEkbifLMoyxzIly5WeujybkGJ0VVxnjcwQe7yihN7R6fvat29m8di1iBZsJfszCg+pCtFJVSTJ1\n7KBB2TM3w/qTNvPeD15OPj2JsVk114w2aF8oxUgjrLzZRqHh7RYAP+gFr9C+1Q79qbRYAwhiLSed\nfzYnnncWrtdjx7Mv8tiDD7Fh7QZcr8TUKt+wJqUxkiNpW/WkxnQ8vH/o4767N8eHvvxpvA/fkveK\nNcmyHEWSWI4IIhYrnvu/80NO2XQCzjs8gGpcVxsjjKHX6zLZnkAE5rpd2pOT7Ni1m/UnbOaaz9+I\nsYIaCdZhQ3CCZGp9o4o5HbQpw99KuKpyyAfqZN+qmyexB2xJrMVOTnLyheex+Zyz2PH0C/zugQc4\nYc0mvPOIlVDMxCt4wUg/rfRQaPVHwJUO9Uq73capR6xhxhV86EufRMRgs2ppIgnlqJLEckTwZcGD\n3/8R+VxBoYpY+ut94scqRqhwjnZ7Alc6ds/s5x0Xnc8Zl7wbk4UYUZBoqEV7U6gL2FaWYSVB9adu\nzKxl8JEomzLg/Go+ergyEwcJ0ySbaHPyhe/khgvfiS96zLy1m4fuuof102vwlDiJXukBB9aBCNUa\nZTgpSCYU3kNmuPCGD9KankRsVlvdqsHrndYsR5OUWDwizO7YiXRKxAh5y0ahDMUwxu3gWdtqgXre\n6u3nw//m85xx6QUYm4OGqa+qj1P0hkjFIPsqT1qoemRX0+dDy1KV12KQ+tL/ry5aV9OPZdf6dYOP\nx/a0IkjeZu2pJ3H1n3+S2bZjptfBYshiDOThxlS/GdF7bwxqwBmlvXYtYgyqsfFYFEqnLsVSjijJ\nslxx+j2+KwtJvefXP/gpGyfX4koXg88bjoohNKOq0i+1Fq3a14FRwTlPnmeUvgwee6eYzKLO88Lu\nHVzzieu56MRNSGaReY6Y+rYc7B0PfmJYyjQ/Oci1Q72XRMvQtif4wGdupJjrcv9tP2B9PkkGOOco\nvGNyYoKy16NadVUEpyV57LDpYszkjO9yzSc/EVrdNmU6RqkbsUv6WRNLR7Ish4qgTlHnWDsxjfeV\nZdV0asDwPKRVJZz+MCqPcLud0y17iBjyLEcEdu/dx6Z3buWGP/8Ca04+AaxB5UCr7VCMpkhobdWq\nGvLJNh/88k1sfMfpoSq9EayYENcZ1zGN9oOlSh/WO1tZi11+jg9/8ZNk7dYhP+lofgcJSJblihJy\nfJsWJagvuO+OHzLhq2nhaBwsff9zNaa4QqdhStzpztFqtSkLBybDAx/+ys2YvI0Yqb3Y8xmNT3d0\n1Ha9IfyCVthy4bmoV1545EnWTU7jugVkFi9VXGYsjGEEXyr7WwXXfuHTIDGYfiy/idVNEssVpDLj\nK0H03vG7n/+C6QI8cYHfSAy2HgUUq4JoiP/zjUDDVruNd8q+2RnOvvYiNr79NJAMEcF5Xzss5nt3\nx00mPGFNU8KiYrxXEJuz9bILedul5/Pqo0/z/O/+wBShlYfG0CJVT5blXHDTR8gmJsIa5dh9A4mK\nJJZDoI4fVE/n9T1M5NFLXKXZjQpCPPgbkY4uZLPMdDrMuZIP/6ubsRMt1FiqxENbFSQ+DsJgmsWX\nm4sIYgQ89ArHKe8+mxPPfhv33nI7a9sTZHkenDkKs3hsqxXSOePMQn0dVpoYI9JPtsKEA0XQsuDe\n2+4kF4Pzo1rNO0ylvXgcDmMs6oVWq41MtfnIv/4M+fQUxobUwIV47kdlmWGhVOcuLzqv+G7ITMpb\nLTyGfGqSa/7VZyijB77oFux3Pd7/2RswWVbXoxwwUBNjRRLLFSQWxgFVyl6PiULJs5wwvx09mnGE\nRgXXK5iYnmRHbx9XfuZj2FaOU8U5R5XTMoKSvygqYQsS2A+ZD2uYIUBUrOA0xGee+/7L2Nfr0jGe\nqz9/E2IsTl1dj1IEPG5YHyexCNI0fAURJFY3V3526/fZnE/hyjLkLqtvPqsOZq6smOUSoWaAd/Uu\nMm/qaZSQm24MOzv7ueZPb0LzEBJkUTSmDlY0t3PgNseLysHVDDeqW7JH8RN8yOhRYdPbT+M9a69i\nzYb1eGvIjAzUo0y53+NLEssl5/AL+GXpmXv9ObacPMH+nSVZ1qYsimCl1DGNEsN0+lKzvAfZYHqM\nxp7U3ivGgrUZ+2bmuPxzH6U1PY0Y2/iI88qJNf4e+Oj4cdDxV19V/NfE+pLhtmH9SScd+PTG92VS\nPcqxJE3Dl5zDiIOCzYSZ3ds49ayNnHHuBjrlvlqcRKqcFY0ZPICE5mXLOVJlXoZMjK00Rmi32+yf\n6/CBP/sErTVrYsfBZB0lVh9JLFcSAePmyK1SugLTLjn30q14OxuyOVSjNWkahWZlGX+k6K5oGq5B\nOQEoyh5l4bjiCzdg2228hyzP64rdicRqIonlSqLQ2/cGqCJqQmwls5x72Vl0ir3BmpMMjZk8igYj\nbhkdQFWLBoTQMMtYslg+rD0xwfa9u8gnpwCDzaqQoLTbJFYfaa9fZgbjJgtef/45BHDeY2K7AOdm\nOe99W3FZtx8ETSjxgGisRLMMY6vzFwGFzASR9F4pvWP73l38yVc+h3f0m6VxYOGJRGI1kMRymRGp\nMnKUXdueYGoyD+mA1mPJ8WWY0nq6nH3+Jt5+8QnMuj2ohHAclViibTnGBo2ii0E8HR5ngLUTfPTf\nfDmkL2bNkg9pvTKxOkliuQIIgut1mN31JipVCa7QhMzYHO9caN5lDcbMcv5lW/DZbCgRhkHIBvzL\n/Qo9cFiH0oJGRi2YIoJYw86ZvVx2w4fxEgLo+0HY/XdNJFYbSSyXHB34qzGpxc2+QSsXxCvqJRa7\nVVRDZkzIjoHSecQ6zr7gNAr24XzRqMpNo23CAeVxj2GocbtVFThRZjpzXPdnn8NMTIS1y/o9kkgm\nVjdJLJecpmtZQr/nosf2V148rNyEkl4eVcF5i3Ml5168lTWbLHOui0ho5KVaIjhCNxshlHo4NjRG\nV3sNGTrq4L3XXk3WzlN0UCIxjySWy0QlmSqKMV1yKTnsel803kzshogoZTHHaeecwBlnb6bUWYyx\neAdQVa/RRdl7qh41oViGEcNsp8u600/Cqab85URiHkksl5j5cigIrz3zCMbrYa21ZjMFkVCVyGQZ\nZTHL5LqCd152Kpp1wdjQd1tDvt1iWvNYY9CyRIwy1yv4wJ9/ChUJDdKSWiYSAySxXGIGS0ko+C7i\nu6Gu42EEaOCRWgypA8Rd2eWsCzaz8ZQMj8OKoqr0yuKYx1qXinOw5T3nYFt5w5mTSCSaJLFccqqG\nAqEAQ2//Doz6RjOug6Na/RSh0nas0BD1VaIwdjlhyyRbzl7LbLEfsUJmjz29X70ny1vsLbps
fc+5\njdXPZFUmEvNJYrkMVOuJALu3vxQr0ngOZ7HJAeuPPjp8QnUbxZOZnKIsaK2Dd11xJj2/n5z8mMfp\n8XTmelzzpU8hdfHeZFsmEgcjieVSo7EaduWpLnuICVkxh7PYlFhotpqCNxAJ1ccVxRiDMeDdLOdd\n9nb2Fm+iPrqTNFQxH1S6ECep0l/ftBI6SrbyCboTYHOLEtvUNuocJRKJPkkslxoB8EH5fIF3Rci5\nzrKjM9c01Efs367/xPhwxZezXHjFO3jbezbRLWZi6wIf2gvWotevyCgKzpU4Df10dnZmuObmj4E1\njSK39MuDJxKJmiSWy4CqggquO0ueh4Bz74+9OnYQzf4U2XkfOygq6rsgc5x/1Tvo6f4gjWKjldrc\niGBQ8iwDCdWDLr3uakyrBRL6xfg6lD7ZlYnEfJJYLgcSuiHufes11Hm8c+Q2P+a5bSik0bQWw4Z8\n7N2j4lE3x3mXbaW91ofGWMYyv8mDAq4sUVW886w5eTOlV/CDcZVJKhOJA0liueRUhTMcnf07g0fb\nGPxip7YD5cjDFF2MARGMMThX4sseW8/ZjGeW0nXqmbuIBgdTtDhFhN1792FbOdZYjIn9Yao3mr/s\nmUgkklguG1ogrlvdWCLxaW5F5l2LVdbVce6lpzO13oL1YGKbCGNw6pmYmERFuODyS+JaZnOLjRz0\nJRlvInH8kMRyqVFAS2Z3vUFumvLjD5gWL3iTKrWwCVpXVJfYp6eKyUQ8Ko6yN8dpb9/A2y8+ETvl\n8c5HB4+n1JJ9cx1Oee/ZtSB6/GBbifqDJBKJiiSWS40EAdvz5mugvnaWCAx6t4+WeUalauOuhjNH\nwrw/eMt9l7edt4nTzlkHXlAxiLG896pL0Ua1cxPLDVfjZN610WKwx7rTgzvO9CCX1f68Y93WgY8e\n7PrxzxHFUkS2ishPReRxEXlMRP59vH+TiNwtIs/EfzfG+0VE/ouIPCsij4jIJcv9IUYNEQHfw1jT\nj/ZRWXS6tcZwoqboDoYX9R+r2kV4dUyug4n1JSKe/XMzbDzz9MUNZEjMPzS9dxgRvDuw8pIc5HIw\nVtPzjnVbg6wugWyyEMuyBP4XVX03cCXwFyLybuCvgR+r6jnAj+NtgI8D58TL14H/uuSjHllixSB1\nCCXe+9qTPbS6FOqwmeH0czdx6tZJOvQgb2GOgz46xhjUO4w1B7Wa0mVpLoOM6oxj+TliYrGqvga8\nFq/vE5EngNOBTwMfjk/7JvAz4D/G+/9Og0v4VyKyQUROjdtZFZSdOYy4YOVJPD9rs87lSmIonCe3\nBa31GR/+7EdjAeHQ7Wc8kfpfVWFfp8tEO2M1H8hLTfVNKoAqubEMZ/8dHY6qCoOInAlcDDwAnNwQ\nwNeBk+P104GXGy/bFu8bEEsR+TrB8mTjxo1HOexRRUAdO159OeSD+6rxmPYLXK4wPsZQFqWnKxn5\n9Dp8WSJWxk5b5g/Xoxhruf2un9Jas2Ew8ehgn+1g3/9qet7hnjPvMdXYZkSE3sw+vnzjhxEtMSav\nXybjtgMtkgWLpYisAf4Z+EtV3SuNeaWqqhxlC0JV/QbwDYCtW7ceR6cspezso21DP5vaxTOkFEIj\nMaMnE9bkG/HkYS11bAmHaX3Lg6tiW4/F97Canne45+i8G1rlVijWWJz6gU5Q83+H1cCCxFJEcoJQ\n/r2q3hbv3l5Nr0XkVOCNeP/DIGJoAAAgAElEQVQrwNbGy7fE+1YJHkMPVPHqwTQnNCu/c4V1J6Us\nHCdu3YqRflrjOLa07X+LVcGSEEOqGm34eFIaZQNvWM9b6LaqaVBwJoamelWpwNXMQrzhAvwt8ISq\n/p+Nh+4AvhqvfxW4vXH/V6JX/Epgz2par0QVIz5Mfxtz77Czrbx1KTZk+XgniG3HMbpYMm4cmWfd\nqI/lPxUIDjURhYNc5CCX1fS8hW5LTFg2EhPCtEwWHGhyHDgFF8NCLMurgD8HHhWR38X7/jfgPwHf\nFpGvAS8CX4yP3QXcCDwLzAL/dklHPOKologJsYvqq0M7lj4bwrqlKx1GwObTSD4NeIxUzc7Gmfhl\nmio8Sxo92kFSW4xFIQLqtd5XbG3PA3E906vDiD3sdo4nFuIN/wWHtr+vO8jzFfiLRY5rTFE6e/YE\nc111JGYtIgYxwvoTT63uYSQGlhgres4xYUPFqtFPXFgeVrddveR4ZvfvDnIkMmBFKiwug+cYUVW8\n97TWrj8gRyeRWCg798z0d+dota826z2J5VKiysy+PXXAdz9AILolhrBvWRObRdjWvEeOowCExLJQ\nhQ4BvLljd9DIsV3rXjxJLJcUxeDrdTNGwJZT9YgxiGmRBDJxrLy1c1d08qwua7JJEsslJVQDUq+1\nA2VITvABVBXk2BubJRI79+yprw8WHFw9J+AklkuJKmiJRs9scNg2zsRD2K8EwSvHgfc7MUzmOr3G\njGl1ksRySfFYHCbLUB8zSkSHal4qSsdDvyl4dSWJZ2KhKJ1eWUUMI/Tb4K0mklguMSGLxA9o0fDP\nx/3ujaOwjpoYP7I8H4UdeagksVxijAkpYoOSNMzYRmkEDieJTBwbeX7sDfeOF5JYLimDRTNCo7Dh\n7mHGGMRkDaNgle/xiWNAsMYuvunemJPEcsnpx1aGLDw9VBXVFcGrJ8tb84oPr+6dPnH0iDEYaebr\nrr6TbhLLJSZMuAd3pLqQwdCodvJmkHwisTCq9smB1SeSFUksl5j5AenDZ6na8CZWNas4GL0iieUS\n48NpOJSz0mBnDjPGUVXxrt8RMdSyTD97YuGIcJAYy1EzCpafdNQsE/2da/g7VW1brvIF+sSxsjoz\nduaTxHKpEQA/TxqHt4MJAt6TSrMlEosjieWSIoCNrhQ/uMwzpJYlIoJ3PaqqR6st6yKRWCqSWC4p\nAsagquHiD90PZqVQrSoh+Xosq3cilUgcO0kslxjnoGpxMCrTXoOfN5TkIU8cHau9iAYksVxaJHq+\nq0rSZhTEUhFWb8HWxNLgfdqHklguKYLO+0qbjXCHhZFqVMNfFkgcH6zGte8klkuKUCrYaFmqulAO\nWCVm8Ky8ZCqQGRPqbGoVb7kad/XEsaJAb2627u/oG/vxalrQSWK5pAhZ3qbahaTOpR1eWbRQ/FfB\n90Knx+TiSRwDzpVhP1rFu04Sy6VEhA2bTsB5j3o/IjHgQbiLuX1Q2was6p0+cfS0Mxur/6/eWUkS\nyyVFaK9Zi/OhSVjwIA47g0cQgc6eHYMZPEMt7JEYN9p5Vi/eDMrl6pHOJJZLimDbk1ibo973K7XE\nMm3DqG0pBNHeu2sHaJFcPIljYu30NP0lpX4q72rak5JYLikCktMrFWNMI9xiuE4VEZjIDWgHH6u4\nJ7sycTRsOe0UYHUX+cuGPYDjjeBQMag6MpvhxGGqOhbLpVKNKXXoASS18ehKB0bBFfT27yFbvyaM\nhePBvpS4tHCwSINxOJz7DsCBvzpqFdGEjRs3rPo
YimRZLjVikHwCkNqyVGdQ8azYARy7SSqKsYIx\nFmNh12svIs6HR0bD+7QI4nerg3n40QfBoSR0dNDGGBVieJn60RDKKmW3YvOGafC+Ie+wugKHkmW5\n9Ihh3YbNFLtn+51wZfkOWql6k4v0TVcJQmIUHJ5MDc5AWXYRKQGDURMt0hE4Mo+CpnXj1WOsAbFx\nPVhrH1ZTcEbxgJYBO0UBj/eCmNGrECVAK7eI9h08qzFaN4nlkmOY3rCJXbtfJnh1zLLu9xr1TkVD\nmWFtvJ0arAgOBfW0Wi18sR/T2gBiGKUD8lgI4dHhM5ooMDomXn6ppaZqbgdeDWJDseaR+mVU8V4R\ne+BEdKTGucwksVxqVLDtNfQ8tKwgKvVkZTmWLAUQFXycigrgREK1IeMRsVj1zJqMXz7/HP6FbXzm\nus8zYS3jtqsP2sGClbD7fuHGaw7a+6j/utET0FCvXujnVIUq+799/EW2bd/R32eGOCeXRiZaZtKK\nXRLLJcarImpw5OC74U5ZPlnS+Fc0zPW9gDEWXxTBerGeYmqCnz/2FPnUFBSOni9o2daYLlgfmLCZ\n15lJ44PHYzAE+zh8nhLHK6+8BlnIAhsmUp9wDWXRw4iZN6bROwEtN+N5vIwwxgiIoTW5AZHKsbOM\niOLrdS4TVr+KAmssagy7BO575jmm1q0DSianJ/nlb385ZtJSUdlg0rhHG9PZ8blI7ckPVJZxp1eO\njAyJhFKDzrmwolSdmusBjudedKwksVxiFEVE2HzKaf087GXc+6tpOIR1SyNh3dJlOS8XHX7zwsv0\nsgz1ilOl0+2wY+d2Sl8s36CWk8Z36dXhtekQGbcL9b8KdHsFrckWo5BdVXnCnQtT8DAjX13iOJ8k\nlsuEySbolRZfWT4qx5zBI4S+4xrzck28LVJNvxWxgmqoB+Mm2jy+cwdPbN+BTE4hxuDVk5scaw1T\na6Z48oUnqMNXtArB6XeBHEUkrmdU36IRE6eH44fUfz2C4L3jj6/tCI4U+lbdSqHxpF79+mIF5z25\nEU4++cQ42jhqWX2ecEhiueSEA1oweQtvJzFiqScwx7h/aSwqbPCoCF4EL2EC6kWxeYuyKMjzjI5p\n8ZPnX2RnryTLBaP95mlVYWInnkeeejRaZdp3Dum4HQIy3getVBNyxYjhoUeewNiMYVhwTcNR8ahT\nrM0oyoIrLjpnxcczihxRLEVkQkR+LSK/F5HHROT/iPefJSIPiMizIvKPItKK97fj7Wfj42cu70cY\nLYKFFuy/zaduAR/P1ou2EqSyQcLB5aNzQAzqFdtq8cfde/jFyy+RtVuUWqLa/3l9NfNTxaunNd2i\nVIfzLizm07cqRtm6PJ7ox0gEWhMTMaRrGAzGaoiC856iLMmNHdKYRouF/DJd4FpVfS9wEfAxEbkS\n+M/A36jq2cAu4Gvx+V8DdsX7/yY+b9UQGtJ7VCzt9RsAiTp57AIU1iVBGjIWYioNRj0+z3n09e08\ntX8/Jm9FK5HQPG3ettQrFsFmwr2/uQcRok9W+wHuiRWhn8MjFN5j84yhnaq0eUVxpUOAouiRSxJL\nWIBYamB/vJnHiwLXArfG+78J3ByvfzreJj5+nQwzWGwISBXwLRZPKL4rgyfuoyLsvmE67QVKD5Ll\neHX0bMZDL7/Ea85hsxZGfVx7EkytmoPrfKLgCsf23dvpuu7A+7BK16NWnsGdYfvOvYSq+nrAYyuC\nDN4w1pBZwwkb1iNppgEscM1SRKyI/A54A7gbeA7YraplfMo24PR4/XTgZYD4+B5g81IOerSpQkMA\nLKX2Y9ZkEV7OcCCB8YI1EtaVWm3ue3EbezC0rMWrw0nla43On/5C1ACCILlw9z3/QuF7tXWZWBm8\nr9oTCz3vuP+3j/SdO0MYTz9xIu49Auodl1xwzkCI02pmQWKpqk5VLwK2AJcD5y32jUXk6yLyGxH5\nzczMzGI3N0LE3V0VJGPthpNATPQ4L8IbHl0BBqWjykNvvMG9r72Gyw1o2NWNgvgQ5KzRCeQFREMy\noIrixUfnEOTW0jE97n/kAZx3GJWB/iqJ5cMYi4jBO8/Lr+3AZS1EbH9WsuJoPwwUQY0ys28PJ29c\nN5Q6rKPIUa0mq+pu4KfA+4ENIlJlAG0BXonXXwG2AsTH1wM7DrKtb6jqZap62fT09DEOfzTxcSql\nCmtOPh3EYk1GXZL/gJC7IIbVbUGx1jQsPYOKYKywz1p+/cqr7Cg1Tu8NiMSTv8SDLSCAadSFC+ue\n/R1fVDCZ8PKbL+G0xKuLDimNVk9icTRPPL4OzFIU711IdxT45UOPItZSnaqGY8hFazLuIRaDdyVW\nDIxES+fhsxBv+IkisiFenwSuB54giObn49O+Ctwer98RbxMf/4mOfz2wo0RBYmkHM8nMXBfvfVUw\nvXEGrwgqaTRMnbXKmhCN+6lHRHlD4YEXXkRb7ZgpJBzowukPobrUb6fSvxCq9qhXpqYmuf0nt4eg\ndiOo1yXw3icOpB9daWKu9Ru79pG3J0OsbGXBDe2r17jGHS6XvvfCoS0LjCILsSxPBX4qIo8ADwJ3\nq+r3gf8I/AcReZawJvm38fl/C2yO9/8H4K+XftijzYA/y7TYfOoZGBPiLetg8sZOWaFSVc0Jzhkr\nhkI9CHTbEzz66na8zYONUtUbXESoiRcFrxRFD20JT7z0ZPUJjoN6l6NFZbE17HpK9fzwnl+RZVmM\ntqxmAcP77qsRFL0e55116tDHM0ocsZCGqj4CXHyQ+58nrF/Ov78DfGFJRjemVAUSEMEprDlhC6+/\n8Qp5NrjTNR3kGi3I6l71oZKQyXNe7XV5atsfKWxG3s4Q5/FGcCKLyowzxmDF0O11UXU8/OTveOeW\nc2jZNsmeWAqqVEZl8PtUSq/88bU3mVq/Eec1RC7Ex4YVjRBWhBRvwBU98nr9NIklpAyeZaAqNuCo\nzspqp5nccCJOQwqZ94qLVdSrEEwj1b+CwaBG2W2En73wIk++uRPyFsZanBqcsYhC7hY5UlWcd7RM\nTp5ltNqWO++9i1JLil6vfl6/l1DiaKlSSMPv7Kmkc+9ch4eeeD7IqAnl8rwJaQfD0KYg5zasq3vl\nyksv7ItD0kogieXyEUtameh8WX/amXhsmI6LYOdlRZSlw5qMsgA1hjlreOCVbcia6RhjqYTzvEdi\nxV+NRTMWNUyVaNUKZVHQdXM88dLjkFWin46UxSKYGB0RdKdU5bv/86d4MowRyrJA60BcGcp6cbMU\ncW92hjNP3YzGor9p/TqQxHLJqXZ6U98yImCmo1hmGDExL7tPZjN63mOnWryyfz+/fP4lsnwS1+th\n8zxOuUPzM0FRUVTMoiqDVyP18VjI8oxWO+eRJx+h6wqqCkomFX49RpoJAX0x8qpsPPlUMmsoS0du\nw4lVRiBEx6snM0KGiVEapHCySDoKlgWp/1UXG2uJ4aQzzqIoiugAkoFnK2BaGU+8tZ3Hdu1E2pNY\nJYRu+E
aSoyi+burj8Iv4CZtWpUosGuw9U9OTfPfH36Xnezhd5Fw/ERDwPuRb3/XjX9Htlah6Wnm7\nPhkNUyq9L0M5tizj8ksuCHuVCGLGvFjJEpLEcpmorAmxtrpGNn0ypQqudHjAZibEZIqha3N+8/I2\ntnUKyNtYA2qCMKpKDDsKYT9S5SWqwSximqzE2EsN03nVELtZupL2ZM6td/8TxUDdy/BePk3NF0QI\nZAjl1wAwwuPPbWN/2XfoOFfiddD6HAaqsUyLgzNOqRLuzAGuqdVMEssVRCXntHe9D5sbMiM4pziv\nvKEl925/hT3GkktGpiUq4QALU+6Vse4k/leFTmdtyx0/v70uFOw1TMlSjPLCUFHUKyIWp8r3f/4A\nT738Oq3WxLCHdgDW5EgmFLN7sbGfcFWLKp0aA0ksVxCnHsw0Ra/EqeClxZN79vL77dtxhaOFAVeG\nYr1iEM1ATZ1pvpJUB0ipBXf+4k66vhs89SOwrjYuqDrEhGT9h55+gf2dEud0JE019Z6i8Gw9dRMH\nVvhPcglJLFeULKYmzricnjHc+8KL7HAOzSx5ZvF41BgUg3gTC/f6Fa/6UlmYxljyPKPQDt/72R2U\ndd0UoSjL+vnJ9hikiiCwJpRce377W7yw7Q2yvI2xdmS0shpnaExmMShXXHx+HWGhVOuVozLi4ZLE\ncoURgTMuvJxfPPccrJnAq2LLEBzkTOWZjpNh42L62XB21rCmKah4bFu49e5/oqtdFCXLsjRJOyJC\noZ77H3wsRC6gMfRr2OMKVNWwIOxxrrM3ntCjUCaNHCCJ5QriUQqnqFjETOGcByvkNsMg/bAgPN7E\n8CAE1SEUX62i5QHUMNuZw7YNt9x5Cx03h/MuVnxI3tImoRRfSD7o+pLv/Oh+Jqamq1SFxt/RoAoN\n897xJ9dcSaxPNfCLpl83kMRyBTFAbgVDm8999HN09s1gJcfFEmsSPd2iglGD8TbE3i13O92DoiH1\nTUAxZDZD8azfuIbv3Rum5E5LvC8pyjHtFLloDpQ9kVA76OmXXuM7P3kQm4X2HeH7DEssfii/Z6Bq\nTAbVNFxxzmG0y0kb1ja6OIbeTNWrEkksV5iQh2ONkJmcKy++irIoQqk1ojCF1nnBvhSWJEvnmIiF\ng6t2FkYMgqF0oWfPt3/4LV7dvQ1EyW0WMj3CC/tTc20cmEP4CEtCPX6NRdb6eO9Q9XhCCqt6pVTP\nz3/zBx5++mWMyXAq8buTWH6vqh2w8mhdVaiqPKVYI6CeGz9UWZX9GOH+OJNtCUksh4YCbzvlTGb3\nzFG6MuSFaxX3SEMghZFZ5GowMdHiF7+9jx/edzeFFvSTfPrTcpVY2Yhm/sqYIYNX+59DwVgQg5ax\nXpCB2374c7bvmsW78mBbGzrVGrRKaHVSOqXs7KOdZym19QgksVxB+itWwULJTM5nb/wsmckPqO0y\nKC6jd2Y3Vmi1Mzp0+Ie7bmHOdVAGp5d911S/zJdnvIpy1NZknQzQbDMWTgtF6SnF84N7H4RsLe3J\nNjYfvSZfzWVoUMrS4Zzjsx//CNaY2uGTRPPgJLFcQerqMxAcOgjT2TS2jDneUuV8R2tMR9cec97H\nbJ8eazZO852ffoeu68UA9jBiMxB6IvV948RA4IxUNSdj8L5TSt+lsB3+7vbvs68AMUqv26lq7o0W\ndXprkHubtXCdGfLGryIirLL+ggvmiPUsE0tHv7JLOORK58is5dPX3cx37/0uNjeUvqQoCnKb1Q6f\nUdQXE2MIjQKlw2TwgwfuYnbfLJdfdCVbTzyDlsmig6qKFD3Q0zryOAUbLEzx1EVQnHiefvUZHnjo\nl6zbtJZNJ7Uoe2+i/iQEizGDFZtGQoBUgnnkw07lfcmXP3ltCBdKHJH0La0gMu9aZkPhCkFoSxuc\n4EpPu91Ph+tPYEcLRes2GV6i51wdrTUtHn7qIf7xzlvY29sbp+bhU3h1I/lZDodYE1M8Y1FmHD3t\n8vOHfs5jLzzCuk1rcU7p9bpg9mHkTZQCjNZW2kgIJYRFcR9qEYiB3sw+rASnXeLIJLEcEk5DUzBj\nBDGWP7nqemb27Kedteh1+r28R9UME9VYASms4bkqvrBbUhZdpjdOcefP7+S3zzxML+aWGw2FZceN\nyivc04JfP/kg//yjW9mxdzuxKwfOO6wNBXzV7sdkO4G5oY75YCge8SHYvCgLPnL1ZXVc7/j9KitP\nmoYPgarGpdTreDBpJ7n2g9dx30O/ILSjUIyGkB3v/UDXxtGgOaUW8iynLIJoiBicd+TtjBdef57H\nn3mM8895Nxe843xyk1O1Wati/qoKPNV9Um91EG1M5uc90B+THHDnPCEQDsg6qrpkxmc3g+ydljh1\nPPiHX/PHV1+kNdUin8z7zhIhZDOpj7c9MItICeJQXYt6QXD1d1Z3+Zw3tvkzj2Oh/9kaLUckfEZj\nDA6Hlkpv315O3bweEDKbj+o5eaRIYjkkDrZznrT2JFp2gsw6CtcDEbz3mJETyshBnBi155h4jIpn\nYm2bl958mSeeeYJN6zZx7Qeup2Uy1Ck2y8JnNDHK74AOh411v8a3NpBq2Xhu/znNa83tHNjjRqX5\nHEHxOPU4dTzy9O954qnHWbNxDdNrJyldsJLrcc43yWL8okgX2AX0ELM5lNkDVE2d+aR1WFjfv46G\n9d1Fz9wbdTCqOHPnPFkrpzc7y5c+/SdkkkqwHQ1JLEcExWOw3HDlDdz36H3s2v8WJR4xFvVuBC3L\nQ9MPUQkB6qV3KEq+tkXXdLn93ttADb2ZLldcfAVnnvI2cs1DzGKtlX37sgpzr3zR1XscPM3yUBPK\n+ZEFjWsaAoScerbvep2f3PcT2msmwIRPMHXCNM6VuE6JzWK/HDxyuFUsUaCLUAIlsBbVSWLh0NoI\n7n+iGFgmwmIah9dbk74AV1vLspxud46PXXMJkzaP3UHnvzJxKJJYjgiCgBHarQk+eMk1/MOdf8/E\n2jaM4bk/JP9o1ISYwRKn22VZht6Xokyun+Dh537Pr/7wa9a11nDRuy9iy0lb41Zi7SORGIIEBzHj\n6ufOf2xQFOuiY0F4VeuY0K7r8rvHHuKl116mNJ58Imdi/USYTfuYteMc6pUsz2LReu1bo4fSNRWU\nsCRhZB/el/HJk4RuSo7+KSFkdoVTgm+kHB4LTfGVwfvVszbPWD81SbB+KyFNK5YLIYnliNCcHGaS\n8fmPf5E77v0OWnryvB3WxMaMqgeXjVaxaviUGqsHew3N1yYnWxT0eODJB7j/D/fTmelw4sYTuPzi\nK1g7uZZMciqDy0gscSaVzMyfZFdXNFp/0YutDqeeN/e8wR9feoHX3nydbtkln8jBQjZlydWGtE1D\ndH0KVk3YqAmNiqs57UK6MAbRK/FqEdNFdAfeb0BkGtWMqttj48ks5uRYnZyqOF2N031jDEYM+3e/\nxZ996jrwJRrXKQ++Opw4GEksR47g4pgwbbSj5NMtdMw8yEahnvb
KwSy+KuecEIOpGoQTaGVCtn6S\nWb+fnzz4I8DgS48rHbnkbFi/kXVr1jI1NcXUxBStvEU7b6HeU5Ylc90Oc905du7Zxe69e+h0OpS+\nQHLI8wwxBmOEbMqgPUOWG3zhQviTic6mKhmgsSw5sHKqcc1SDlOgrv7cseSZCiIFxr6F+i7KiWHb\nKo2e4c2FhqNHBkzd6DwUQb0Do1x/zeVQOrJWK3zfdThUqhu1EJJYjghNT2glIp+9/nPc+qNbaU1k\njOPZv2qGFm7QdwZX1h5hvVBEaovRVc4eBcUhArZtsa1gye3r7WZmz150d+hfI1WaHn1LttYLC3YS\nMmlFwQr50MYYyqIMFYKch8xg4+tUfdA3lTqPuvEp+h/kSPn6A46qcGZQPKhFZB9CBkwC7ej88fG5\nph7nMXzj8S3D92GtxZUFBti3dw8nb1xHVn1H1Xc2fqs8Q2N8vAarDEHIbYsvXP9F9r05gxOHYCi9\nrw/A4G0e8kAPgjJvXNGKrCtwi1ZFjYKTRCUKFYgx9etNDENCBYPFxBYN3vsYoxrT9HyM2HHhet+5\nrBhrkFgxKQiywbvQF0dMcNaIp18gyYTu7CrRKlYZvBC2q4ezKud/G7WAm/haQWQXYt5AzB4wZdi8\n+jg19/V46nVWfLgc5k2V0EzOa/gyy7IgM5bZmb186ZMfipk6wdKt14ElWZULJYnlCCMIVjK+/Jk/\no9hf4HxJO2/Vj6low9c5YjQjYgavHvDYIV93sLCcg10OOYYjPT7wz5Hff/4YFkLzefVrqw07jOzG\nyA5ECpCske3TVP1qvdocNqaoWkc1cTk1MxmlK7nhmitpyegV9hg3kliOOEaEtmnx+Ru+gLUW9WXI\nWqsdDYmxRHxc81RE9iPyJiIhxIoqRTKsU8QQpUrsDp+aaEycxgPdssf6CcPJG9fWTrbEsZO+wVEl\nTqWq7JK2nUB7inM+1oiME7QRNSwTC0RCjKeYHmLeRKRD/7D0gEUxYU1zAeXtKg+4FQPdWT50xcXR\n4kw7ymJJYjmqiIS1tqrKtsKnr72Z3lwZ9vtYdzAdA+NKZSlKdP44oIOYHYjsjxamQSSsY2pcxFQ9\n9CGrQOl9aAGiji/ddC0TrTy+TZqDLJYklmOCEUNu2nz541/mpHUnMdmaJDPNNa7E+FFZi0JYjyxA\nuoh5A2PeQGS2ETQuMZj90AiKNVCWXT7xoYvJYkB/tYXE4khiOSYoinqPwXD1RddgfYuiU9ahLYlx\noxLJGMtTecshhA2YfRizG+ghQiz1BiKHX7PMBN5/8XlMZq36vn7weWIxJLEcYQadsiFtEA0ZPh/9\nwEeZbE+Rm+zAmL8R7NmTmE8IS1K1wUMu/V86/M1RuhjzJkg32pa+jlCtQor6t0BUOfO0U9iyeQNV\nNj2NLSYWRxLLEUbmXVRCHCKEY+v6qz7K/p1zWGtCMQilX5kwRRuPNnHtUSqnTSMkqaGbYDpg3kJk\nD32HT3w4LGuGEn6qnHHKZi4770x86ea170j7wVKQxHJMscbSNi0+94kvsPutfVgsNjeIhv7UIbDa\np+NkLNG4fulRzRDpYcwOxOwC6WFUsLHkW1F41Aub1k3wvvPfgXMu1tdM1uRSk8RybJiXY62ClsqU\nneTPP/kV3rHlHPCx7rWEQrRGj1zsITGKCGjMLpJQnUiRkCYp20Bm6rXqzBrOPHEd177vPUCIswSS\n428ZSGI5RlRpvRAiQWweUvtzk/OuM97F+979PorCh55U3oW0t8SYEjzkisS6kx5HLA2Xv4XXnXgc\np65bw/svehdoGWNyk0guFwsWSxGxIvKwiHw/3j5LRB4QkWdF5B9FpBXvb8fbz8bHz1yeoa9eDsjG\nU6XdmuCUjadx0zWfwGARBOfTNHw8qat6xDVNxWsoNoIIruxh832849RJrr7kXbEtsRndivrHCUfz\n7f574InG7f8M/I2qnk2on/+1eP/XgF3x/r+Jz0ssmn7lwfn6573Ho+QmZ41dw9tOOhNjM1qtVnKM\njy1aO+lCVaZQkxNVim6XS8+9iEvf/V7ECMaCNYYxLHk6VixILEVkC/AJ4P+NtwW4Frg1PuWbwM3x\n+qfjbeLj10maGywaP6/Cw4BoGgmOHUDE8p53Xsz733sle/fsPqCeZGIMENOI+gl54d4rxgjelXz8\nmk9y1slnxxbgLnafDPU4E8vHQr/e/wv4X+knp24GdqtqGW9vA06P108HXgaIj++Jzx9ARL4uIr8R\nkd/MzMwc4/BXD+aAQJ/PwUMAAA+tSURBVKLGYzEtEsBaQ24yTll3Gn/2sX/NzJ7ZsNYZWylksUGY\nSIzti5WL6jyPoyyqkzg26u+4UW4PtF/l3IeamqgD8VibUcyVnH3aOWye2kxVJ95II20ysawcUSxF\n5CbgDVX97VK+sap+Q1UvU9XLpqenl3LTqx5VxYqlbSf4yk1f5carPxXaJSD0ii7WCt73vawqghdC\nnUUFq4L16eBbTqzGivLKwPcPgnMF1oSTWaka2mp0DZ+57rNccu4lqdbUkFiIZXkV8CkReQH4FmH6\n/X8DG0SkqrS+BXglXn8F2AoQH18P7FjCMSeOQLXqEbyohkk7yceuuoHOvg7WZJTekbWyyrCsXgWx\nOqY2LJzE8nBgG1+pf49WllH4AmMteMO7znw3N///7Z1djBxVdsd/51ZVz4c92B6PPeCxCdjmy8oG\nzLIYsmhhSXbZtfnYD6LAmsBDokhJHhLxkIAiRcpj8hAlkaJsopAoD8mG7G4SCFJEvAubKFLkXVjA\nfAXwbpAwwTMeMDBje7q7qk4e7q3qmvaMdxigu2bm/Cyrq25Vd/+7pvrf59577r2fvZ3BaAhsXsq+\n8RPNUlUfVNXtqnoRcBfwhKoeBJ4E7gyn3Qc8ErYfDfuE40+oZcj2nDRNy5URATYObOLu/QdJ8gYR\nMVma0an+lTEmKpCLhmngjI+Ls69xp0kkzTKSqMHp002+8tkvc8n5u4iixNurdeL0jQ/TJPw7wP0i\nchTfJvlQKH8I2BzK7wce+HASjeUQx0XQ77+AORC5mNtuvJ1r9+zj9HtnyFFaWYsojkiztBJpWjXv\n40fKpTYkzG7u25GBPOL9k6f40k1fYsA1cOFvKVBZ3MzoNR9owTJV/R7wvbD9Y+DaBc6ZA37hI9Bm\nfBSozpvL0Iljx5Yd3HXga3z70DdxcUSWZgwPDdFutnCIHx1k38mPFdEi6vfRZRRFZFlOJDHrhtbx\n5Zu+QiRCpnmlE0fD75j9cfqBre64ysnFr3HtyhR2Px3YgBvgrlu+Ritr8/Bj38CNOr8cbHGOfSE/\nXgR8dqz/MWudafLJy69h97bdPruhiPClWIVHuybHMHqNZWatcqTsO+2kHTnnIxWHYzBqcPC2g3z6\nyhuYefs0WV6kruRh0UW/LGs1YUml08Yp899szXls5/rSScUKj0Xvts8OysnyFBf5nMks89tCxMz0\nLDd/6mYunbgUp2Fm/O
<remaining base64-encoded PNG data omitted>\n",
+            "text/plain": [
+              ""
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ruyvVnmCDBgj", + "colab_type": "text" + }, + "source": [ + "## Adjust HSV in YIQ\n", + "This operation changes color scale of a given RGB image to YIQ but here instead of choosing randomly, delta hue and saturation values are inputs form the user." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "vbCdwGtYChnQ", + "colab_type": "code", + "outputId": "9aa7cada-ffe4-4404-8a31-21427e9c18c4", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "delta = 0.5\n", + "saturation = 0.3\n", + "value = 0.6\n", + "adj_hsvinyiq = tfa.image.adjust_hsv_in_yiq(google_img, delta, saturation, value)\n", + "_ = plt.imshow(adj_hsvinyiq)" + ], + "execution_count": 10, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvWmUJFd17/vb50RE1thDdUut1oAG\nkBCTQFgSs0ECjMQkmcEM14BtvPB6y2+t+9Z9a73r9fzhrfft3i/Pb11/8DPXxgw2IDBgMQgMiEGM\nEgIEmhAa0NCSWrJ6rq6qzIhz9vtwTkRGVVd3l7qrKjO7zk/KrszIyMyTmZH/2GfvffYWVSWRSCQS\nx8YMegCJRCIxCiSxTCQSiRWQxDKRSCRWQBLLRCKRWAFJLBOJRGIFJLFMJBKJFbAmYikiV4vIvSJy\nv4j81Vq8RiKRSKwnstp5liJigd8CbwR2AT8D3qeqd6/qCyUSicQ6shaW5RXA/ar6oKr2gM8C167B\n6yQSicS6ka3Bc54FPNq6vQt42bEeMDU1pdu2bVuDoSQSicSxeeSRR55W1dOOt99aiOWKEJGPAB8B\nmJmZ4a//+q8HNZREIrGB+Yu/+IuHV7LfWkzDHwPOad0+O25bhKp+VFUvU9XLpqam1mAYiUQisXqs\nhVj+DLhQRM4XkQJ4L/DlNXidRCKRWDdWfRquqpWI/K/AvwMW+Jiq3rXar5NIJBLryZr4LFX1RuDG\ntXjuRCKRGARpBU8ikUisgIFFwxOJZ0x7/YQMbBSJDUoSy8TocBSB1KPflUisGkksEyNDaoCSGCTJ\nZ5lIJBIrIFmWidFCtTXnlubq0oIwImlinlhdklgmRgup/8QrS+bmSSQTa0USy8QIoYtEUltKKUiK\n8iTWlCSWiaOykiizQpwaC7LkAbWUSbzV3NbWTi0DUWhNp1VRr4SbYZv6MAVX71GveOdR51HAWIMA\nYg1iDMZIf/y1tSmCSLQ+4/WW/B75rhXCmwr7tYbLUpN20f2L33jiFCGJZWJZahHUWgQhCFX/Kqp+\n0QO0+auYoER4pxgj/ecEPB4rgnqHd46qV/HEXQ/z8IMPMTc7j3rFGkOnKPDOYYzBWhueIIqpIDjv\nEQQRQdWHMREF10gUUKFyFdYaut0uXpWyqsAKWZFz0cXPZeZZpzO+ZQqTGWxmo7iGZ1OniDHUstpI\nfuvkEAReW+/Ro6oYST+vU4n0bSaWRSAIZX2jTSMOtVAFyxJVanvOO48Yg6MCsRgMvnT4yrHrzvt5\n8LcPsDC7QGEyrLEUeYYrSyazAjRYf+pBMIiCOodzHmMsRvqiperxnsaS7A9RcVUQ2sxYyqrCmowM\n6NgC75WMjEfueZDf/eZ+vPOUVYUKvPCySzjt2WeRjxUggjXhdURM8/5UNYyhOXuYlpAb+meYxKlC\nEsvEUZEl08nm5y/1lDOamlEo66m2IGAt6h30HE8/ups7b/0lswcPYcUw2Zkkt4ZifCIIoBicV1Sj\nQEbLznuPMQYREy218DraDEib6bT32pptG7yvsMY2+1sszjuMtVRVRZZlKIr3jswYFEsnzwHhd7+6\nj9/+8h56VZdu2ePc55zPi6+6HG8U4nS//cGEqbeP16T5FBKnFkksEytGAOcc1tpm6iki4INo1bNy\nVy2w9+En+cWPfk5WWYosoyBj68SWvrj6ICa2ERVPlhmItpuiiI2WI54Qv2mLEX1fJCCm7U3UIJTh\nRnQXKNZK63U8KFhjwrMZ8PGVBaUwhqIYZ6ozwaHd+7jp41+hW/U48/xn8fxXvJh8YixO2eMnEy1s\n1SD+iVOPJJaJo9MO1mjw2RkxuMphMwMahK1XdSlsxsO/uJ87f/FrJvIOmbFMSAe12sRMFFBRkPBc\nzdNrbU3qogg3MaVS+rP+k1zFs3iqXlvGIZKuKD78VYn3CmGTMp6PMVGMc+iJvdxyw/d4et/TvOKq\n13D6RWeTFRn1Owrvw1Pbs4lThySWiaOySFhEQgTae4w1VL2KhYNz/PQbP8Af7lHYDJMJY2qRCiQT\nvDpMZoL/Uuspq6Evi7UaEv2dJuwn/dh5f+p/4mMPD5f+1satoK2940DU0L9Xw7hMEHkN0RwyLDNT\nW3no9t9y38/uxqHM9g7zgssu4fyXXIQYG4NCiVOJJJYbmZa11ra5+vHmxalBoPjKMb9/lu99/uuM\nF2MhWJLnTUAmz4swJRbAEa3GaBqqNAGS8GzS0q8oLlJPhfvpOKtDbS3SUtIwpsUvo/0/IqiRMD3X\nMBqREIU3xlJWDkTIgZliml23P8Bdt93BC654Mee98DmYzMbPYrEVjfT9rs14Fn0Pyd85jCSx3MBo\nnWWo/ZCExuWEHo8hpN6ogqsqnrzvUX7y7R9w2uRWpjuTIZASHZUK0XUX04Q0pNxo7RCM9x9tGr1o\n+r0GYqFHe+WjRa1bqroow7IObi3KJxVUwFrLTD7Nrtvv56Hb72XqjK289MqXhah6tDQVjT5ejRH0\nflgM9eEbWZqwmhgKklhuaLTxRWodoGh8iSHQ4Z3jFzfdwmO/fZjTtmxj+9QMFR4rpsmzFDFHF6MN\nh
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fdbCDYJkG8Gv", + "colab_type": "text" + }, + "source": [ + "## Dense Image Warp\n", + "This operation is for non-linear warp of any image specified by the flow field of the offset vector (here used random values for example). " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dG557eQDDtSK", + "colab_type": "code", + "outputId": "9a5fa541-d465-435a-9b47-edded4a2811a", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "input_img = tf.image.convert_image_dtype(tf.expand_dims(google_img, 0), tf.dtypes.float32)\n", + "\n", + "flow_shape = [1, input_img.shape[1], input_img.shape[2], 2]\n", + "init_flows = np.float32(np.random.normal(size=flow_shape) * 2.0)\n", + "dense_img_warp = tfa.image.dense_image_warp(input_img, init_flows)\n", + "dense_img_warp = tf.squeeze(dense_img_warp, 0)\n", + "_ = plt.imshow(dense_img_warp)" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXm8ZFV1779r731O1R167qahgQZE\nQsQhDgioIAgyOibqM4l5mmjEzL5MT42axDGaxORjZk1MgnlP4zMxalDmURxQUEGZZG5oaJqe71R1\nzt5rvT/2qdu3m4ZuoYd7m/P9fLpv1TlVp85Q51dr7zWJmdHS0tLS8ti4fb0DLS0tLXOBVixbWlpa\ndoFWLFtaWlp2gVYsW1paWnaBVixbWlpadoFWLFtaWlp2gT0iliJylojcJiJ3iMg798RntLS0tOxN\nZHfHWYqIB34EnA7cD3wH+Dkzu3m3flBLS0vLXmRPWJbHAXeY2V1mVgH/DrxqD3xOS0tLy14j7IFt\nHgzcN+P5/cDxj/WGpUuX2uGHH74HdqWlpaXlsbn++uvXmdmynb1uT4jlLiEi5wLnAqxcuZLrrrtu\nX+1KS0vLkxgRuXdXXrcnhuGrgUNnPD+kWbYNZvZJMzvWzI5dtmynot7S0tKyT9kTYvkd4CgROUJE\nSuBngS/vgc9paWlp2Wvs9mG4mUUR+Q3gIsAD/2xmN+3uz2lpaWnZm+yROUsz+yrw1T2x7ZaWlpZ9\nQZvB0zI3sEd53NKyl2jFsmVuIGSRNDDJi1rNbNmb7LPQoZaWH5eBSDZ/pv9CFk6hpWXP0VqWLXMG\nsWnjctqqbEfnLXuLVixb5gSKoWIzRNK2WpNqmGprWbbsUVqxbJkTuEYKBcAMp1sF0wREhLb5Xsue\npBXLlj3Ko8rXj6lrimFI9vMM5i4NMMvD8xlCuY1obi+ge0hQjbRHttsye2jFsuURPB45mTmPCGCW\nxUO2f1V89K0P1sQNDz1inZIzHAQQzYK56q8/lofgjaf8hrf+7LZCWfdBmj2IETauh/7U1vXjm5sP\n1h3sxdaH00K4zaptxVHw2x1Ma+Xub7Ri2fIIBKalwAb/bW+gARVGf/Aeayy9wXPxqMXmbYZZxDBU\nwCw+Qkym5x97k4Rul5tf/Hyo+tPrA8Idpx/Pna84hYfPOB5JieEv/T9EExtOeR5brvgqh9x5K5sv\nvxCZGuf+04+HopPfvOpuVr/tDflxdxhSnT9xdEHeP3Ez9kOwtPXo1fqoedKMkCXMsjhajRFh+jgB\ntcEJaD1O+xlt6FDLNgzub88OwnEawRADEcWjQCKZx3AEc0QML5E6QWkFMRgb/+y9LHv7B4llH2cF\n3z/9xTz74q/zjZecgC0d4eD1Gznife+Dpz+PG37r7Rx4/60c8/F/4OtvfD0/sfE+BOjXyqgvkN4U\nCz/xbzx05gvyPq19iL5Eyg+9G3GOu/7kDzkEY6zRuwdOPwE04kLF6teeSWGejVbRO/QpjN67inlF\nYLyuOeKq6xCrECnBe0wNYQrE46igTkg1CcNLUJdQAoHQnJCIki0PEQGNYIHtjc2Wuc1ur5T+eDj2\n2GOtLdE2uzCyKEaBMBiONkKJKAqYKZ5AkgpHmS1GDEckmeK0oPY15dgmrNNB0kJ+cM6zeOYVNwKB\nqR99j/7b3kZysKa/madd+QPWfuZfkPM+hY8KIWFWYmaYzx9uDnxiegZTYkKDxzC8Uywa0YwQPJUL\nWEp4J1Qo87SgVmPMVcyzgFNYetHX+f4Zx7HSCUN//nEmfu/3wI0Tf+fDHPTSU6Hs8J2TnsPzvnZD\ntiYlIs10goZAlkhDzIHUQEK0w5SbpGsj07MALbMXEbnezI7d6etasWx5BDMsyGl2MKoUEoZHVDGR\nxgydwNIIjoiZod5z32+8kfTd61g0vJB70jhHsICen4Si4KCvfIuHT38BNR58DSKUdY1zgqpHqcE6\n4MBpibo+4h2mlj+vmbP0ZggJlQIlEUUogYTgzZhyjm4KeImoevCKWiI8/TlsuPkGRiRRTXmGy8T4\nMc9n/iEHc9+1lzO8eR1LXvFabv38f/Cir1+HWIGREAEVwU3W0C0wMcQCSATz1C5RUGBm2dpsmbW0\nYtnyhBlYl9MIWCNODsXMgTQ+6rFxLCZkKGJ+hI1nvJjohf5ppzN8ycUoHqTHQxZZYUNMBKVjnjJ5\nesUkdepQOgUMbw7UUQdFG51xRHwKoCGbltDMe0q2MtUQl+ceTRRp1F5EsKQgIASiGE4ipAJ1CZ8i\nUTzeZS9R9JEiCuaUG+YfzuJ6nOVTY+hxxzP/mq9zRx15zpXfQcqAS/mkJO9wkxNYdxgxaaZAmx+P\nwfi8Zdayq2LZXsaWHZAVUpr/kjQxjWa
ghpgRTZFVd6OpT9Q+43fdwLWvO5vvnft2Np11MvcefjAL\n/uXTDF1yKbVPIEqwksVuHg5PIZB8n8lyikIdhU8IFYZQOaMqIl5hKJYUSQgpUAUFF7eauI01KyLg\nXV6sCTGHNZawmZGcbxwuhhu4rkRRJziGcBYhRVRAzOF0mOg8z9i8lkPGN9PRSPG1qxEnrBzqMvat\nb/LDN78BW/swt33sfRiKDI3gpCbF7FyvbCJ/zi7eYbPAZmnZCa1l2bJDBvkxYqC3fh952rOxGLPl\nNLEJG1nMla96IYfUnsXWZ8Or/wfhy5/liC9cwbpXvAS1gjr0cFaAS3RiQXQRsw6BmujAWRY4J33E\nHKJdzPVJIog5VBzBKqzxlChuuqCGbL+vg1zI6RVCtjohqCOJYs0QWV3Ca0mwiFpJDJM4E4yA00B0\nhrk+nZjnJCtvKIZziqukmSKAWqbohiFcrElSsv4nf4qjP/ohHvjMf7D4zW9gaNLB/Hk7OLnGo01m\n9oHObrmCLbtKa1m2PCZxRpxgDvhunmsFJDAjUpME1n3za6DGD848Adm4nrHvfx/qSQ6c6LPijz7C\nWAXzv/QZRqNj3StfCnisSBTWIeAwMfq+AhyehIpDpQkjEjDtZhGTiBIQkzyUpiZRYDis+aoOpgW2\nnR0YZPc4sCy0YjKtn0kUT8JpwCEEDZgkkjjMRcoUcFriTEjiKBQ6MZAIBAXUU6oQezXqCsSBc0Ip\nw5g5+hJQhaW33cjEDT9ixU+/nu5UFsptY0+bPRchEreGZTXrzIzOdLBVy2yjtSyfzDTebk/K4S4S\nMEuICzOCCkFVsXt+hCsK1rztzRS9SUwE5xxR+9y6uc/TF2YLalBJDbIIO4TaV4TkUUl4K3GkPIe5\nzb443CD/WyIhlSSn7IjtnU9N0iOCm5Hnsy1iHpMmUL7ZgD3K+gHOFCXkKPhtCEDEzHF/nOLQoiSp\nseyiS9n4srMo3vQrxH/9e6JXqpPP4JB3/CHmOogquD4inWa6tUniTFB5KK1udqbY4XG37Blay7Jl\np0SJzSMjuQ5mhhvbAgZ9GYScG9e/8On4Q49Cyg5UES2HUAFNiqOYFsq8JZAmS8chJIkUqST6LJTA\nI4US8hxio4BiflooVR75Y779IkGyVQk7FEpgGyG07YRy+/UDVNy0UMZtMnbyeRNRDi06gOCdY+M5\nZ0JK1P/696gZUSNjX/86685+Pnb7D8E5hA4a+6j2MRLJFLNxypTPWJJBhGvLbKMNSn8SE0wwqaen\n+lQ8Mm8eMrWJQuZhJNzYQxw5r8v6lz0f1YLgFalrjPJRC0ha2LrCW/6KFanchT1KbE1qzDh7lA/Z\ny4TtBD7br9uKmmoWVm9GUmXi5W9gdNEClr7uNYzf+D3mrTgMGyowV+CjYV7zOXcdoMbM40hECe2N\nOQtpr8mTGENRM6QuSWtvp1hxGGs/+F6WvuNPWPvKk3hgwQEctGk1wbqoeZxNYClizk+LmJntxljC\nuZPysr1Qbl1hKHmKYuF/f44qGOs+fR5OhdXDNahn4vknctT7P8pDf/sxDnzrr5EKj9SBVFYUVuQp\nY6/T1nLL7KC9Gk9iJCkuleiNl1Gu20ItJWNLl7H2ZS9gy1TFAZvX4KzGmYHUmPMYC8CGyANeRUTa\noOsGEdlG4GJIhFRgKM7VDE0EQuwx/9sXcc+Zz2TZq3+GyRu+jdMSKRwhlcTaEby0QjkLaS3L/ZHH\nCoSeEV5jGOvOOp47iyGK0YIlBzyF0VuuJ1hBx/UY6Zckl4fP3ox+UDp1zBsQm/ZQtzwSAUJyYNkT\nj5VUrkJFsTjEUNnn4Tf8NMsuuhKcIhNriZ0DEGeo1TjZOm2x05YZbU+NvUL7bZ/jGFA3Dofp2jcz\nGnrlfG0w6zVL8rzaD098Nhsu+AKalKf0J1n00BZGbv0eqQvmEsOhxFwO9xEMZ8ZQHYjeYAfOkJZt\ncUrORBKl9jXRaoJXfHSU1PQTlK5gwzkvpX/S8aCKly041mLWQ218elv5cjYOr5mOphkN3HZQGKpl\nN9OK5RzHrHGiaOOwoSaJkWwQli2Y9REp6MceloDxzTzt6u8y9Vd/g/eOm11gXmHUzpDJJmbRHEmk\n8VLDwPHiTGnNmJ2TXFOIxBteO+AFM4dzHmeOBVVJrCFZYrI0Nr32daAdZGqC+rtXoIyg1kS/WpqO\nyXSDed3GmjQZXOeWPU0rlnMcAZxB7SqUGlGfi0qIYqa59qKCmaMjgbhpAw+87mU4hK4lxn/xjTyD\nhBkUKhReqN0g3XFGrGUTRB60vS13leSgSI+8xaKDXgAKcNJBXMFYP/LgOSey8Wf+J93RlWy+/ALc\nj27BW6Ip/sZ0wNMj4p7akfjeoA1Kn8PkoVfEEUAjlfMUFqnFUwD0e0hIkHIsoPrA7W96DcMP3Uc9\nZYwUIKYICXwHVcW59vdzT5Lz1nO4vgl0UsVEYRTRiMGhAl3ms/S/vsg3TzuNp44Gll78LZA8EI/1\nFGUxjKI5utRk2yD9VjF/bNqg9CcBQr5JFMNcU4xXi1wsIjkoh2HzFESlf+f3WH/6CcxfvRoxGC5y\nyI+JA58D0r2fO6E7cxVrgtwFBSrGS+gmoVTHUN9RJsfqJUtY+8rTOWq0JJ36slz2jYQgFMUwZnVj\nZxp1U3i42WjLHqQVyznKVkMi5zsncrqgKCQCZjWsXwPFCNZRkh8laMWICs4ZHZTSgXNuum/NbBhl\n7O+kJsxKBZwFhqucz94roddR+t5YvP4enARqVZa/8z3IVJ/ed78OgGzZAIBpwjQh1t7Ee4s2dGiO\nImTnTpJ8ERNZOPtBGYqCxQ1oVfH9N7+CkWcezaJrr0NclxCVMirJBYQIuDZOci/imx8kxaFOKZPm\na5hyznohQnRGTH0KX7LutOOoknHwhVdw/+nP5ZCLrkUnejA6iqUa5/JoYODoaecu9xztj9IcQK2p\nSjOoSGNNTUdRvPXzMCz1iMBwLay/8HPUq9dwyxtehm5YT3n9N5AYQRLjpSA6hFlAKWhvrX1DMCUo\n9ELBROGJUqISqJ3h1WPeQ1KSh65T7n/V6azrKwZc+/ITcRoJTSV2YGtb4H16VPs3rVjOAZwAxGnv\ntBGwxsMNglPAdfG6mX4Q+Ms/oXPAMg6QIQ7vdBitO4jPlWy8Keaa2o4t+5RsCRqd6JuQLMOZR0wI\n6qi94tVhrkAjHFiWPHTGi3jhhVcidYV6w4mAaS4SJdBGW+45WrGcI+Rhdw2aSEDfK6IQ772t6fsi\npG9dz90fehfihrjhzW9ErEaLRmJ1azyeSp2F19rLvy8xwBs4y/nkgzYeyVkuZ6fZjRMFOo3JOPzm\nN/HAK87i7r/4CDeeeTL1vbfkTCoDU5nu196y+2nvllmOoTk/RwJCkes+Si6Hm9wkD116DdSOLed9\nnBs+/McsuOoLWIIDH54CE1zdWBozrrQ0cXuPrNPYsi8wMTw2PW5wOignKiSXy8d5aiarmol/+ReG
\nxBG/cgkHKjx0zbWDrVA5RaR1Q+wp2jM7y5FcPhczATzRJwIBZ47EEAe/+c1YAv30P3FwMcxwWMAG\nqRitA5PRMfyoW26Ha7MRaRqdFcmRnGUh7VVIUTDUabKoVHG/eS4Hve6NUI+TBBLtD9+eprUsZzk1\nhqdARahFCYTs7dEanyIyOcHmM59DccrrKbRmCzVBBSUy7F3TrrZlztBcrNorYgkxQzslokZHoXz3\nu7kjFTzlla/BbArtLMoN08idMoFBbeKW3UwrlrOcYlD81oQyJaaiokDtStac80LGX3UyFh311ecj\ndQenAUGyVQLTzb5a5ghCnmpRITqHN0OtS4xgvsuS5z6PKlRsvvIyhHlUKN5AUXBGMiU3DmrZ3bTD\n8NnEoLTazGC5BP1gdMTR91BaH5ccP3rxcSzudpjQEnG5UGzdFcqeEMs+ZSqb9gyxDSeZZWwfC+nM\noTPmj5PLKahBS/BCN9Zot0SA+3/2F3jBV69FXW7lWyJUAh1V1KXcMdPk0ZpHtjwBdmpZisg/i8ha\nEfnhjGWLReQSEbm9+buoWS4i8lcicoeI3Cgiz92TO7/fMTOquCmtRoCO5tjK0gKOIW5+y1uZ3xFW\nLz0UHwLOlQhGEZVUJIIGVIzknjzjsbk01ZAjErI41l6JgxTIptaaN3ApoRJZV0XuXrAUNc+6WHHw\n+VdhrqZyERHLBVAM1Dmc5STKNslgz7Arw/B/Bc7abtk7gcvM7CjgsuY5wNnAUc2/c4G/3z27uf9j\nGElSbtDVBBgbUKGNEiQ0TbLuq//B8tU3UaYOB25YhVp2AtQM+r9AFZ48IgnN6ZlZw3OwXGZnyP0g\nRAhygWBHTn+MTumZoRYRV1InY0iElVse5v46svmIo7jjb/4McSVd6TSFhXOt0cGN7KWdWdtT7HQY\nbmZXi8jh2y1+FXBK8/g84ErgHc3yT1tOK/iWiCwUkYPM7MHdtcP7L4Y3yYV1B1kZWlNqAIxEB+/6\nECsmXv3zyH/9XxZqpO+6YFDM+N0r4/47u7Jtv/AZbXEfsSL/0+3a5s4GsoDn6xV9zPndlq9zxwkx\ndBDr0zVFxVGWgeeefzHSKWHDuuaABBE/c4Mg7fz0nuTx3lXLZwjgGmB58/hg4L4Zr7u/WfYIsRSR\nc8nWJytXrnycu7EfYTmezhCcOEBBA+PBGPv8p9jwl//AlkMP5ugNdzJPu1SpZiIM40yJPlKm/bvX\n9CCj2rAsEoMK4Za2EYmBTqYZVqVKtrhnE0kGsZS5VJuiuLpP6s7DpT73ugUc0e2zbrLmqAuuAfMk\nwC06eFZay08GnrDN3liRP/ZX0cw+aWbHmtmxy5Yte6K7MecRcRgBMcmiaQV9D6PVGMte+1aWFIbb\nPE5KI0wFIwRFJIH3FPu5UMIg+8hNW90ImA5aZjRZK2qQcsFjRy6KPHOIPpvIu5Rw6hBTQPG+oNOb\nxCkcKH1UjWMuugxtpmhMbPaZyU8iHq9YPiQiBwE0f9c2y1cDh8543SHNspadoCjSWEuqJbXkztQ8\ncB/rTj+RjWmKI/vjFDpFSENUMo9uDUV0+P2gernTRx9Cbi0WkUXRNIIlnHOI5nVeczte825aT0yY\nFs3ZRnbwSFOT1PAGPdflpgS3jC5i6F3v5YHnnUgsR+hozhXPVdMFtSfXnPRs4fGK5ZeBNzWP3wR8\nacbyNzZe8ROAze185c5RwKXc88aZp3ZGWHsfZTTSYc9gUqY4pNtFk2dShkjSw5FQ8UQBnSVqYDvJ\nIhlUTVKJRKm2WSc7Mf/U5VJyhXj8QGQ0F5IICJui0XvJ6fhkOIMtSK7CYxB92NqSYXpfmu3u4HOj\nJfKQf8+JkldHkQJIJNT58Uiv4umhy9N6PZaedBrP+sAHCSmRxEMSRAKCIW3E3z5hV0KHPgt8Ezha\nRO4XkbcAHwFOF5HbgZc2zwG+CtwF3AH8I/Bre2Sv9zMSkUlvBM0+7U49Tly+ku+84lSuOfskRi0x\nISVlt8KCMRwFi0Lf+6Ym5exg0OvamqYw2fEi0x7qgSBmZ8ZWkaqsv02YU3TZglx39hl5u5JT/NIg\nflAcAcdEM/q+p1IWOWHkskvw730fOKF38CF4BDOjiDPPkW4jm9Z0quynenpZEI8a24iSNO/bXnQf\njdSYtzt6tViuWakuYubQAu49/EjuPOpIer/4RpZc+DWoejjtYN7jpjaiRT29v21k0L6h7cEzS9DJ\njbjOMLUrcJIlcP1pxyPAcIDJSghimEChRnSz947xmrta5NIQW4ebSRSHgpOmvJxj00RiwZCjWwqT\nMVGknA2vkvC1Y8GV3+SHp72In7r4ElS7PPyyk1j+lUvQskBEqMxTXPU1ZNFCOOY50NtCmjeftV/4\nN1a8+BTu+R+v4eDLv0Mw46a3/QIL7rqV1eY4xJRCXNP9MiISqK3Gi8dQzKDrAmaRCqW0EpXsmHGW\njy9o5NF8pIYy6FUWkgPJweeDsKGe9BmyDpPWZ/Mzns2Sm7/P4kuvZ8s5p7D0ggsxuigRLyU1PQrr\n0KrknmFXe/C0YjkLMJSeCQUJLHLNGSdwXAjcwDCHpZpSKxS306HqbEGomSoGvVo9wzGi4tHs60eT\nQ8Sxpj/FoZ0R+qKsGnU8ZSzxoCWWdxxL//NSfGcelsaQchTE6KF0e1NYZxgZH4OoMNTNvYZMsHtu\nw448jJi6lBJzG1kJ/ODnzuKpv/VuRp//ImxyEhkd5fKTn8WpV13Pqne9nc3fvo7aVyyXQJUcI0VE\n6zzEd24ItRpnRvKRIhYgSvQJ0+Ixh2ZiQnKJ5MCpUCSfY2J9wqNU6ihShbcu7u1vZ/GrXwfiUPG4\n7a51tD5BOnvwqj15aRuWzSHEHF2JBAI3nXEqRzq4+7nPY7i3BazPpKQ5I5RGwqzDUC10otFJue+4\nmQc83dQHp3hX44phUlFz0EVf57Ben6WXfp0lZiy84FpYc3euehzmYVN9Io6uGZQFSQJp3hBx0RJi\np7M1A+bwo3Fagsulkj2C18izPnMBwy84AQvKdW95DeoTp152LVR9Vn7wL3joiCNZXHQY/v0/Zkx6\nWNFh+SXXEMwRLVF5ofIVYh7EiD4ReWyhhFx6zWgCz02IPuZhtBkSE4V3uO4wa0KHRa9+PT21nKoY\nq0dsqxXKfU9rWc4GzHKr05jAOb5x6nGwZISf2NQnd9eBnbtAZhMRw+NJ+ODYUBnzHGx2wyxz46yS\nIdaMb+b0K7+Hxc2IX4qFiFhBjaLO4TfeDYsOIaRA9JKH75at1WQgzuEa90xiMNQ1oqMpLGGICo5I\n5QvKxoOcxHAqSK8HTnjgqitYfsbL8VYRXcmaM57HJoXaeQ79pV/G/vFTqOTq8kN4JqSiGwN9l+di\npwud7BBBTKhCxVAf+kXAN2FCG7Vgoa9ynVLnWHrhtQyqlzopcj3nYHPqqs9VWstyLqCDmEHBrIdo\n5KEzjufwjvDsiV4WCGZv2t6jYmDSByup64oRpwy95W0
cedHVUEWecf4FnHbljSAV2l0GEpGHV2PA\nvWefQocpwqIVhDowKPMuJnl42kz6BXLmiwEeJbpcDDkYIBETzS2BnVCmrQ4e18QT2dAosTvMgWe+\nDL/uQcwVBDPGY81T//wTHJWE9E+fokyJNc4z+fSTqaYqRsoulUskJzMqQu34NIiBiiEaSE4p+n2S\nGMv++1IWuoQpPPyTz2LZly9p6jBLc6ygrVDOOlrLcpYQb7yW63/nNziMSEweT413JcklxMKsy0B5\ndIxbRkdYvnkTR194NeY73HfGCzno8m8y9cm/YfTc3ySaJ0geyopGChdQy818k0guKjejR1BqhtS5\nUGPZ1G8ceKazG6nfLPFAMI8KYIqIy44SA/CoOJzlsKu8u0bEKBlk/jhCfzOpnA9W46fGsaGFyKZ7\n+PZ73svS793L0LweHRWSCcGEuMNAccEN8vuD4mtPacpkKIhmrPzi+dCrsKUHsOpf/p6Df+nXKMxR\nuUTZltXbq7SW5WzEYGYoYkWFmqIbVhOOeR7H//fFrEkdnE8E18FbblgFkbgPKwi57X5QvfomNCj3\niElNqE80Yz2BZ29ex0YnpO58zAvlr/4mwTzz3/o7zbyfok0tusI5ooATIUkCiSSJufc5jfc424ig\nfvp5NjCzqEQ8nSb6MjRN3JSEIzf/8hRk+9NlT7c0jTU03wClNdvPlUCpy5Hs0LEAvmDjX3yMyde8\niXDvfTzgx9HoWPSlL5CKREVN1B4QoZh5OxlKwGEUaoiLbPKBAy+6mpGiQDTBggUofVb80q8QpIkL\npWaKyT13MVseN61luTdoirMKCZGC7K5QggVEHFRjrHv52dxx4HyOuG8tKhAkC0L0MQcv7yP63tFJ\nmq0kgVocnQQxClIodT/RKR1J4K5kLLbIitJzB12eedGlOD+Exoh3eUhtosgD9xIPWonHg0CiJlBQ\nUVNSkGchPYNu6DTiuC2Wvc2ieMuhOdnibARrZm1QeMT6Qexnfklerk0HTUmGOZdrmuQlmMuvu+Gk\nk1n6q79A75/PY56v8OryPGlqMoisIFiiT48Yugy5mpiMoJ57amFLIZQOTrz0W2AVNQVCjyBbG4CY\nJsS11uXeYlctyzYVYC+QBBwOzJGoKa0AMYzED95+LktOfBEPjHqO/e13s+H33k5obmiBfSqUSRzD\nNSCKmOK0xHwkOUOkYILEqMDUOecQzv9vxixwZFEy+oWLefq8IaauuZqhk07HhdBYgpZ7GK44nDBj\nPi5YFtIslMD0MNTnwhmAkrK4TiPNK7JQDizJXIDEbR0zDSprsDVoHvKPl9/mReAGt4MHNcV7R5L8\nI+fMoyI842tXsOWcFzCpgMsBXW6qQjodzIOkHn3niFYyr1ZSAHHCgk/8M0+r+owcdAi6YDnUkRgC\nQRyShsFDao6xFcrZSTsM3wvkNlNQyaCDX01FwlU9nvnn/8CKV/8ih22aZMPv/q/pG3pQwNXYN5P8\nlc/OkC0dZax0VD5Q+wiSiBJIw8owHeRt5xLOv5ilF3+Tl15xNUsvvRYbHaGoA67oZtuwyeYx0eZ4\nhGrmiOYxgq1lWhR3ICAz3uam17tHfqu32/y2QrnjOAMvDpOahCEpkEQQAx+FBRdczSEfeh+9yjF6\n6XdYY1DESKinqMwwZwyZETtQp8SGyiEKIwccjM1fmrOGioIgeSZ2sOs7PMaWWUMrlnuRkHKbUyxQ\nqieVI5hF1v/d+xmKfpsK19PFI/ZRDfAygbea4VpZ0BOSGJUItQOXKnzq8+BRRyPn/RvmIvG7V2OW\n5w3VFfQChBNOpAB6MypbDH5evK9IAAAgAElEQVQMyn2YjeJ28WtvBEqU5CMhKnVMSPB4HWL4hHM4\n7IrrKB68Dee7TBUlt48uwXyHoj9FbQnr93ECS3wkrDyceuEyEMFrO6Cbi7RiuRfxzjA8cvX5GMrl\nP3cOhILeRV+gV+g+ksVHR3F0kqPyimrOzCn7iqFsfuUbGV28lPlfuZgDv/jfdI59NhIEk4perKlS\npDQjAR0aq1KUNIe6TToE0WKr1VpA7YyU+z6gcQq3+EhWlp7Segxt6KHOiMUw3nVY9tUruXsi8WDq\nUneGKDQhyYMpc+YktEzTOnj2GgbJUFWcD1gaY93LTmaqVELlKXoKRfGIZlb7gpnVxQtVNnWUBePC\n5hFlIyPE4UU8dd0qFn/lckiKzV9M8o5gRpQ8N+hRxuqK0aIEcpZSdqRIU413Hx7grjDI1myqrfuo\nWHCYKk4Mak8qKqT2xMJRJNh09VdY++XzGfnuzRx85YVsPuMlLLjoGiQm6HTpE+nE0NSN29cH2DKg\nDR3a52xfEkyILjtp7/nIOyD2MGDZ+/4W/6JTsaIgbVcwbGclz3YXzqzJLBk8z6l63iA5oZMU6zj6\nvQ6HqOewLWtYcNX3YGgeccFSEIfDMMkqkOPIHfOKzvTxWFNQowl3nP00Oy4JfAJCDuFyzmGixCKh\n4kgB4k3fwXSS4sVnc+Tv/282OEe0Dj9acSi4Cut0SZbopICFhEn9WJ/cMktpLcvdziBmZUerlIRR\nf+/b3PSxD7Bi7YOU2gHzqGtKcDVhLJUThqIjiSESsT3yu6YklwgpxwOmptmVihLUUBFMpxDJ2TPL\nL/g+N59zIkddeg2uTvhiOxPJIEoOscieXTfty5YmvGdWmM474RG7OF2dXaZjZaNLBDy19ailQ8fA\nO2HDyccy/8rvkO66geLwY/KPiHP0ga4U1PQIdBon01z41dj/aS3LfcLAOhtE8W1rGZpzJEsUE5/i\np/7Pv7Pw3TfjSGwp1xCBWhJRsuVRarbFzE9gOkxXJ3b73pooQXOIdxSHtzxv6pPHRECE220+93YW\nE7XgB6e/gCMuuZRiagJfBLYNZARkayyab2IArIldHJyV2S6UsINdbM7F9Eqfa14iUEiXYRF8UzJv\n8VXXEUToHPls3KaN4DuIFHRIOUzdugiCx7fTlnOMVix3KwNLa2CbuO21hHTHL5P004xdcCjj1yxk\nwftuZsnLFfFGN3Wb+EOHs5pAHyWH7PTcyO7bSxu0YpAmSyZbsKkJhK8KJTkFCxwTag54w89z5/xF\npOc8k46V6NDojK09tvo5QrYu51p++y5guUJw82QHL1iyfPqhSBfBbRMltb+dj/2dNoZht5Pn5qat\nylzaG0vG+nv/iPmr/g/lCefx4C1f5oATvgTVQcTzD8ZsjAk3SaHDeFFIgegDfR8Z7UUeo0XNj7+H\n03esz90hneHNqJ3gDTrJqEUwg9UTyspPn8dJF1ySh+kq9Okx5Lqk7eIVH439VRTE7brymbW1e+c6\nrVjudgaWZdPOFgBFvGPkrj8jnbaK6sqncEi/S68biTbOgl+6n3TAg2z48E9iJDYxwWi5AKc1I31P\nv0xE8QzF3Ttwy33Ocqri1js5ISbUoWRtOcRzLv4GGgSpNhKKUepaKYe65CCodmCyq7RCOfdpxXKP\nMBiGN3N1Bv
WG8xl66SR20YGYh6nhMUIffNmjv3QDXVtMEk+n9swrhnAa8Si1LwlaUdjuF6YyeXoh\nISguKWpGVx2bvWNhvyYwgXnFm4ewiNRTQteBpdy7u6XlSURrGuwR8mlNKTWZODXF4rMx24Cc8U2i\nGENWUNKhxijMkdxmlv/BTfjXrcVXZa60I54N4S5Eu812B5kwvnn8xCoRTRQ1TqFQSF7piOP+E46j\nLAK3Lp7HQV+8DKljbtvqEq6bcvylyC437mpp2V9oxXKPYFiq8D5ABVIXqCrx8oPYcvUzKaphJlMe\nojsiScosQDEwcswDLPn9DZgFTDosqQ9D3aC3oKC5WgM7jMH5Ma3PUhVxSs9HrM4e8EVXf4N1vR7H\n/dPneOA//o0N//6viClaRRCPOUV0Oy94S8uTgFYsdyeWwHKhL7xg1KSOQAH923+FTWk+ALGcohwU\nzEiKl4SkiPdVjrMcWsXo6zYgCeqOIz3rbrBICh1AUFdjboqcYjIoWwtNue1dooyBJB1IJUOTHbqF\nMRkSUy88lXE1WHwAK375d6i6QzgJuDLQ9F3EZkmf8paWvUkrlrsVwaSJsTRBKXD0IT3E2tU/Yr6f\nYCjWzSsh+iaA3ST3n9GEJUffRuguE/o6TjrofsbXJrxfDDaGhYhZhVIiGCYVj8ffXIWmLUPo47og\n0Zj3vz7AkR/5Uxad8MLc8kKNFa/9ueYdgznK0FR4bL86LU8u2m/87kRyzcqgNYNZPaVLddVhHPSM\nN7JBfnLHb5OUe8ZYgfiEsIVq8Y84+F23s/L1C7E4QfW0mxEr8X3wNg8Aw01n/vy4ZNsw0deCSYUx\n75n8wPvQepIj/vRvcmdFJ+CKnWyppeXJQSuWu4nBwNREMSdNsl+EzZcSj/473A/ezlK9lR3N9RkO\nsUQSh1gXXG6zOnXrT1AVt3LkmxMT9w6z2W/C3BBSA1YwRsLp4xOzgS3qnCHemFxxNMu/cTVCopxD\nlYFaWvYWrVjuJqbFxxQs16YUgzS5mnD7byDalPF9FCdMwuFcL7eeIKFAccwPkeRJznHYr6/mqHes\nozAYevVqqoPvxpsibvJxCZsAvRBJBYxF5ann/StiBTI0j3qHDbhaWp7ctGK5u9AmkEcKsq2Y2HTp\ncjrLX0nVfT7mocK21j7bDnMyXXTCBJx1wed0QSc13jt6cZyH5WE6z36Q+asP4qB33Ujd7+xwxlK3\nrQSxzTqnEQM6VWLDmLBkWCi2bMZpIgGFFTkv/YmflZaW/YZWLHcXLkf4a5N3nSyw8PSHMIRy7HqK\nl9xKeIw4brECCJiA106u/yiKYnlukkjh4Mh3r0bpMPyuW7njL5dQFHmjSWpAiVZhJLZxWNvWJl2V\n86gL2YJ1HZb9+m+z9Etfg3mLMdfBUTTbArG2lFhLy4BWLHcnpmA1iToXJ9NE/+qV4KG68icQffTQ\nHmkELrdqrVA/lecjpUKkyhXWi5p+mgTfR1zkab+9BEJEykiIAfWam6GhmNRoY8WaJPqhRoDQxHcK\nHRwB/3//Dqoe1A40kiThrGh6d7fOnZaWAa1Y7lYcTgLePGY9Nl18HN2TbgTrY6ZY2nkcpGt6YUtT\nOUMQsICnItYdQghg2liw9+NOW8uSd95D+bqNSO2YkikwwakgKk0zWYdPHUymSK5iKngmvTDvsiso\nJ6ZY9fKzsBL6XghmYDELf0tLyzStWO5OJPfTTprbpvZHanqXPx0TwVJCfoyzbdsFmFtTBREcph5s\niNpPMvr8B1l/yTzG79qEoSx/112YCbUYnprCKsxKgjq8duhqzWgap7ZIJyrzLruG5ZdfQ00Od6oF\nhJAnPdtJy5aWaVqx3M142YLziZC2ENIqNoZ5iBlOHPwYxScGFiaAQ6crpYuvAQEX8U2xjnmn3s+K\nM8fplxMU6SA2jtxBwPKw3nm89BCg7kciwkRYwEJzqB/CxYqgAcVRWKDQQHSRfddXsqVldtKK5e7E\nQPwCnBXUYRFdapbqeqCDp8vjNdUUD1KDVKjmCuWiBRFHUkgEoiQO/d8PgUyBjuJNmBrqYdFDUKLU\nuO4QEzqfkXojE1j2urtRfLOHJom+i7mboVlbVmw30/74zG1asdzNJJSxKxagjKH94eyYESP5SR7P\n7WKSoOnBY1YiJnlOkj6gFBQEE5JXLE3S6zzIT/7uFub9wc2E3giT3U2kqNQAvUmGOz02xfncFIZw\nBkki0dW5TJsJRdM3R8P+r5S7W7zSTvpZPVZR9cdi+9fv6vt3tj8tPx6tWO5OEkTpM+/UTUxc8GI6\nYRzDMGqw8Liajol5zPKA28kUXiZx0s/PzaFNR0ghUHfqLHZ1rnx+wHsmoJdY9Ac3M0wH6wRSqjni\n/P/krC9/iSix+QYItSSSpBx2tHvPyhPGtvmrWNPPZkBtven1RhYJ2+69M7e1fQzpjmJKB9uZuW7m\na2YK0faPdiZRajqde79NhNd2rxsch8z8PN3xZ6Smf9LMme6ZEylphsNucJ7gkce+o8cGW5u2Abkb\nfBO5saMD3E+ZbffF3CZAaV16q/6GxWf+EPBN3neuFPR4ZwGzR9yDNbWabdDsKjcbAwVVLAkVSgqT\nJAe13cmKP9oCYtRDDg19NvoDWP3ynwY/n4DgCTgcnkCgwDUB6bkYyBOrl7m7GFhkCmCKmE3fvLX1\nKKSLpuaMqOXsqRnvV3L86yAG1swQNbQRGLOmBbHm1xhZ0EwEp4aZTgvc9BUU2U48FW8yvX1mfKax\nVcwS4MVhTe69zHi/WhbQZNqIo01vS0RQNXCS94UZwqZZUgf1+QefJeKmxVAQzGybY1fLaRA2Yz8H\nx640+ROWXzuYk1GmGkfjk29We6diKSKHisgVInKziNwkIm9vli8WkUtE5Pbm76JmuYjIX4nIHSJy\no4g8d08fxKxClLHb38OmW3+fydQlSv4F9hp44gM/Q60AthUDyDeTt0BIBU7L6eXxnmWkAAtfcT+L\n//qPGX75Kdy7bKSJUx986XO3bzNFhMbnLiCzo5D+4Ib3WP7BsACav7qFdVHrY16IgLp8HAPLUMiZ\nVW7QxtYMN0PoRC0fayOEKoJo7nYpprkVsQlRcntiaSw7r1mwEzCWtwTNfooIyZQNqcnJmmGVeTUq\nsiBNW8C61YZTJzhx032SslWpODXEybQo2wzBS02lK6fWHHPez9vX5xFIn9wLXhtbMIkxJQ4kC680\n3SvVDDFrIi/yucK21pvKbsZAot/8cmnT3fjJEWa2K5ZlBH7XzI4BTgB+XUSOAd4JXGZmRwGXNc8B\nzgaOav6dC/z9bt/rWczGiw9g6UsfYvTBjzPix3Ck6SEzJjh7IgK0VSL1MS5d8lu/vMVhd0OEzqEO\nuekDHP6WX+OFn7uMRCTOsE8hWyKzkUELuNwlMmcVqVYYFdHGiKnDlioLkWjuUvnd+3Lr4GRGMMWa\nhmw3P9zPAtNYdSowJsJvffLOaWE56cMbCWY4E664eQvRCb4RxiTZ+ht
zgtJYisC4CLUTvnvfBK/7\n6G1sMuG/LlsDwD2ba1aNJU78wDoeinkyZro9sBqbnPAP5z/AZT/cnI9VjaDGyR9cjwGTzXX5+Bfv\nwzUdOP/z2nX55Ihg4rhpTR8VY7ypSRCd8NRFJWbGf31tLVNNc7UpcThy/K1ToyeOUz46lcUYo5Lc\ntK4GNovQc5IzwAwcCUPw1mkWeQZ97p8M7PQozexBM/tu83gMuAU4GHgVcF7zsvOAVzePXwV82jLf\nAhaKyEG7fc9nHQlQFp+xPhfIlQQm+EHaohgIqOy9oW3ynjpGvDOminvhzD9lfPxziNw1Pe80Fxj8\nvAiQpAmbCh1IHnOjFNLn45+/g5d8ZD09J5z67ls4+tARLv3BJiZF+NPP38ekCKd9YC2rHpyiJ8JJ\n77qNk/74AU750x4v++B6/vJtR7LFCWf+8YN89T2L+Z1P3EnthBc8fSEnvfMW3vrxO7ht9STjzdB2\nRI2eCJ+9fA0d4LqbNxGBZxw6wmfecTRfvGwNP3/mCmoRvBcOnF/wtXcv4Wc+uJqT37uKV334R1Qi\n9J1w35opPn/TEuaPFrz0IxN8+P/dy5gTrnrPUv7hggf4wZ1jmMB37pzilI9O8tufvJNPXrieCXFM\nARPAYQcNcfKHNlMAk07oASe9/0EmRXjNSQfwt/+xCjHh9R+4lRf94So+d9EDJIGXvus2LnnHEH/x\nxfuoxXHGR8aZckKhxsc+fReFwbiDH6ze0nxntBkgxeaaOJ4sLUbEbNcPVEQOB64GngGsMrOFzXIB\nNprZQhE5H/iImV3TrLsMeIeZXbfdts4lW56sXLnyeffee+8TP5p9jNlmRIZYe9Xvs7D+a4SS2nk8\nPQxPkUrUVXtzj4CESyXOefqdki3jC1h61j2Ygd/aU21OEAFHjTUTBZU2ndrNOOX991MMLQIzTj98\nPZfds5jF7kHu3+z46WeXXHDXUmI1yQJZjxr0ihVc8K75KNmKeu2HpugHiJvv4XmHFnx37SgX/dGB\nrB+r+eW/qklEjjvoYb790LI8NLVAzyUKdRy/Yg3XPriM5DxFMib7G3DlCB9/U5dli0p+8a8mmZp4\nmDedOMTYROTC2xZz4qEP87X7l5B8gZgRt6ziqx9+Gj/9wUnMxTxl7MCpYBoIThgfv5uv/snTeO0H\np1BXYc7jUiKGknpiLRe8/zDOeu/ddEaW8MV3zufl719LGTrMZw1bOBCAw4bWcu/kcj76C57f/Wwe\nfhfRpj9THVTj6+kMLcJLh0/8qufAhSVdG0fcMFMIw9o0gZdcvHoOfYV2iIhcb2bH7vR1uyqWIjIK\nXAV8yMy+ICKbBmLZrN9oZot2VSxncuyxx9p11z3q6jmDAWOXHoBJzVCawDkQtBnGKWKdvWpZAlRo\ndoDYBGUJ6aSaav1nGVn8C82kvea7crZ/4w2QPoaidHEYEYcZaIKz/3wMadJJaxFWhDVsqA9FcEym\nzZS+i+CpJlehfpROdz6Yx+NRM/o6zrBfQOUTRXQgyuffPczPfGAC8YahJO95xZHr+MrtCzEpKJMj\nuUS/GqN085EAFg0JhtOQ239YdjjVBs7loe/5fzCfV32ox4rhVayePDQ7V3ycnjemceilIJTN16UK\n4LVGVJrpWoe6wcBQeOsJW/jkN0cJljAMSTmv3wsokV49TqccRTSgPpFUCSJ88PXCH/17QZI+0nsQ\nLVeCq/O8a+jwzpes488umccvndDnf568bFrEddARAPBz4fvzGOxWsRSRAjgfuMjM/qJZdhtwipk9\n2AyzrzSzo0XkE83jz27/ukfb/v4klrVGxm94C2nTwyyIl+El0fcF3mpCKlC3d4e/znsqjcSiw8gL\nxxF3C6ZPQ/KMP+IF1Xwjz3ZMczF6rAcEkjh8VfOi997JS54/j2/fdgCVq3Epx6JqkZ0hXj3J1TQ+\n3iwmgGhA1JH9G0JyFU6zua1Os6MkedQngpYYkWSCk7ylKIlgQBNLkEzBKUkS3gKmoD7iU5EdTI2i\n5Or2We6rWNOREZKvssc6R+ZimuMUohnqt1Z/cilHL5hLOHMkIjSxvCB48ySXSESc5rRVDXWzD7k6\nlRAQ9YhA7fp4ClQiZk1NApcwATXBm+DM85l3DnGAmwIL1M4oCLkyFnO/J/quiuWueMMF+BRwy0Ao\nG74MvKl5/CbgSzOWv7Hxip8AbH4sodyfUKA0xW/4MvPS/2fvveMtq8r7//ez1trnnNumMnSk21DB\niFhAaQKKAmooCigmxGBi9Bv1m4hiw0IR288UYqLkKxKVKhLFCIKiMWIEAdEYqdIZyrTbztl7ref5\n/bH2OXNnQBiGy517mfPhNdxT9tl77fbsZz3l87kMVCgDNLWdEzyujTCztGdRKkwd7Qfn4exW0mUv\nJT3wNaIlxOerfC4YSsiGUoEkzdx9RFa5DAu244g9N6HynWzIXARvYJlGpHQdLHpc93KXbHDEGYRE\n8iXqMju8OdfNZwCe5COKUro26gR8wpyhLlFogbiAukRyCfMJ07qyQEG8Zok3H6lcibpI5TokFIt5\nX1puELzhKXA4XCwyebRToou9DlmXsvYRXjFvIIY6BQ15f2uLlci1soIgYkjICcbkK6KLWJ0AMlfl\n3wOSHGIOEcnTcfU49VCvB4NvXRUZpUlbcq481jHvuW4onwjWJTW7J/AW4EYRub7+7IPAacB5InI8\ncAdwZP3dpcDBwC3ABPAn0zriWQwP4ALJhlg5vC/zR7+NIVQu4DX328w0XHQUvqL1xjtJ5ggH3k7J\nIho5vTynpk+xTip4HLmQKJFCwRXvhwNO7+BMMBxectWLr4+3iwFC6pW+YKC5Gz5/Ty5/cbj8Q3G9\nGsLs6WXaO6BXY+jx9QmX2ghnohPz2eg6L1B37xsRT4ERyVJvAUIen7latK5eu4R8UvJYPYmq3pZl\ngpN6VOCxlKu7PB4QklR4KRA8gqASswGu9zPVD2pzeWx5Pwqsrp4wy/tqkkMHwQoiFS/ZchmvfvG2\ntDDaBIKVmLQopY2nxbozHsxtPK6xrGOPf+iW2v9RljfgnU9yXHMS41cUtFyiYCHF2I8QUYqqRdko\nUTGK5JjpPoBERdJB2pdsy/zXXof5JdnMmMy5izxYyF1HvaSC8ZObJ3jFM4fqdzXJseVpNYBS4UJB\n13B14dZ4cE1JUtQxPlffGr633B8+b1OPpKy1nfzLUH8XHuVGmrpemfLp2tvnEcut5mVxay0ra6yj\ni0dfV/cXrn4ITflMhIIGp71tC179of/l0k89h3kK5lqUTBIYwM+xB+6TwRyZgM0NDO1fMbn4vZBg\nUFYQC8kZQxOKlAkwZhqeAqxiwSHngt+U3gRzrl3gdWg91AYnap4G7rPTILcui3Q67TUXr2Pxbj28\n+bWNzMaG7oOm2/FzwGnKicftgEO4dTwX6zdsoCar3jjKhqBvLKcV916xN4MPfY5WYxlJPUUF6pRW\nLFDXazqZMSRVyuRwIminm1
hyODOM+AjOzFkPyTG5ROp5M684LbHT4oAbv2vNRTemYNpTBBHp/dvn\nmU2O+/z/ssO8CnOGSL6e0gxXd2xI9I3lNGLL/S9nslpEse+DOQ7khZB81rQxx0zapmSO5AZZZVvQ\n2eFkXOt5wCjSi72FudWmVts+X/+XG5cLzntf4KFSKBbtnBeb4hX1sf7oHT+dAODA09p0OhMILmfM\n1YNkTlVj4zCYfWM5nah+y9CBP8dGr8almvhgSh/3TPo6XpQGbTZxd9C67UMsvWw3sBGMXCqjVMy9\nqGVOlkRindSABQ3hj88o87EV6RvJaULPM3eDAOyy5F7+vxNfRKwfsMlVoN1Y7Ny7jtYHG3dwZrpR\n7MrK/1jEgPeYNPCSMKkw8TVBQbcXe+bMZgLui1uy3UG3kCkfWnUlX4HWvTBzBgYiAU83CWNc+asx\nZPntlK7AWktotkaA/jR8umAYceIBPvunO1KgKAFEsxpp3Y2W61a7FQNPX//r6btnGwS/Q02wkZ1q\nrzKiLv8Vcm3czBnKboIDtt//N2ANYBBq1qKalmLuYK2sq8NI5jh4txGuOv0FNBfu2DOU0J+GTxcE\noTEwn2AVy5OrJZZz4jJn90NtKLsEcU9fPL33boYwee2hAIxdcSBDzUl8+ybEJUw90tPsnuGLqeZW\nLP0iqqsWgcJdlz0LdLw2k+tDRbyB0cu+5npBJ4YmYYX4uRR9nXMwafEXZ9/HkZ9e1qODMxzW49SC\nJ08/OPsx5+6X2YiBF10CgB95JlUs6MQC56glIazXey0zeT3V1/DD1WJKW4jYJFsfcBP4AVbzac8p\n3xLohjACUJBLq43mWge2m8HtY5qgyq33DbLLdqM4Vbo0wYaSH1NdEr2nN/rGchoxsMflDB14DQN7\nnYtZqntnuz3BG+Zi2jLcwYO6FUt/9qa6f7gOBZjMPWdAun+EVPdpK/Cav7meF222UXTUbhg4R0yO\nL7z5Gex98oNMiq/L+B3JMgPU3LuYnjj6CZ5pxMM/2pPmVvvQvOt0pBdjq5M65kASNUX5jKDbOLeV\n/w3trQ6FHlVDVdNrtWZsLE8adXtmlEgg1J2GHiMSFj2LX9zv8Gjfo3wKoCZc+aGFmCWu+thmYLln\nXOvW0qd7YqeLp/8eziAam72U4Z3/hna1Obgp827JSZ6ZfPia9OwLxX6/ZXj7k4GUSRqsAZYN5ZxK\nhEhueTRqQYOUC1mueH8TVx/rObU/cwGWY8MHfPI+zrjkfnI8yWfGp95CbmNwLPvGcrowftunGLz7\nC4x/f0eG9r8QV2XCBqHrVc7seLp22kiM3/sfCB2MQFZGNKg7L+aaJ9Z9CDg85sF7xyrnMVu9H32D\nOY0QSCa857Ah/uqwLTOTEYrH6hyi9ZZ7uqNvLKcJQzuchD/ghtwh8+NXglgdFhSQVKdUZv5wj6Ul\n2G8/RMoNjnQERAUhkIhoXeY9V5CIud0RRU2YMDjk1HbPs+xj+uHF+My/5zYGyPo9mfyuq0K0caBv\nLKcR47ecQ7Xk0CyB6hWvLssC4BGyFo+b4Xt6xC9jNOxZy7QaTVXMKaZWy+AKYa6Ero06Xpm5Fr0I\nzVrBsaNzqrx+TiFZxSUnLWK+KBNk8bYur1FOe2wchVt9YzmNGNrpNBi7AbNYE64mos+KgIIi5tAZ\nn64og/FXmHgcHkFRyUSvOtcCTVMS+IrLpStmVDHRcgn6JUPTDtU2XgredsqtgHDcqTfXqo5Ws7Rv\nHMkd2Fj2coYwfnkDmlsxXjw309MmRahopEw84GYwEw45dje51Ydg67dhjCGWSK6ZvxOrJ1RzB5FY\n36CZUMMQzINbdVOeENrq3vB+3HJ6YDVp5mhsssKE8z6wQ/cbutHjjQUbz54+xRi9fJChA0qG5u3M\nQPpNrU/fQrR7U1eo6IyWDokI8favUd75PYyER/FW4SwhTGXenuWotbEE6WXDu1AVGoufnVUep8Qt\n+x7m9MBL5gNt06QQ4X1nLc3XMfTITDYW9I3lNGHkgAkgMnrfTxGBjmtR+TwdV9/GpSJ7ljPI05bU\nM1TcC2kVTkaAXPJRq36BzhHvq67p9+ZJNY9ibh71ODM6EytoayYr6eOpgOGCZ4jIrQ9Qd+1Yz8vf\nWNA3ltOI+y/bmyY3MuH3obA2wSJYA4nDGL73RJ4JGBHvKjppiJHGTaTJa8jEvzWjpcmcVJvqKjNC\nREmoE162w2StMljLLczB/ZrdEPbascm1d1d89oTNseSmCHFsPA+ovrGcRmx+4E8ZW/Q2Ws87HK9N\nQmyBeSyMYa6cQQr+7hTb0S52o1PsgRv4o0xCLFketlewOIcQZTWPpRBwKB2Eq/9nFJm4f414ZT9m\nOb147+GDdGyAnRYI6oROvzsAACAASURBVB2Y4HHMMe6qJ4W+sZxmNB78Fsuv/heSnyT6Ns6NgRW5\nINzVMq1POVZfwAvtRww//wzEHsTZGGjNkO4A0xk04E8CU/V3TOoOHiWZY0CU7356V5pDm6zxE+kT\nAU8r3vjpMV62ZcWKJDhKhKyTYnMsSfhk0DeW04xyi7eyaeM39c3qcoywFrwXsyx8P4MQhPKXe6My\njNjCXNIkOausInNDWuIRzkumaPPiqVR442mTfOsDg4+Yfven408eRtY9WvXgLRx08u+YHyKGoqKo\n03w9bSSxy76xnFZMUkiDUiKmVutJZ9JfpIOJgFQzNpq2LQaEoB6f5rPi2r+qM8sNnGXdbZkjl0Cq\nPZgoEaGoj22FBy7/2wavOaXseZJ9r3I6YXg8//2ZXXn3sdtR4hFt4Mzh1CHm8HOlquJJYm7cKbMe\n3RtzgIXPfQetl19FaDVQUcQC1gEQnLqaqXxm0JKH6bgCE8f4lc+gWLwQbEWmarN8E8ykxMWTgTef\nO3gs9EIHRk7sC9Ca0utuZn2vctqQj+PBn7yd/XYexptQmgCaVQA2ouPcN5bTgvqCsV9S3fddZGB3\ndLIiKEgSvB9GzNWkGjN7cbWi0tj7F7RlHkPbfgJz8xFTzM09DsIosSeHa0QE4cAP/Iq9T/wVHQtc\n/IHBnqHMSZ6NJ572VOGoF6xAgbLYilIhCgQHmfdJSCi6kUzDZTZMV3bffXe75pprNvQwpg1j1xzB\nwPJvgQ1gAp6KMnRwOv1eZZe1eup02gCnHnMJLQUpDF3yZzSf92YoK2jtRxSyxJQZbo54B5Eq11bW\n+2oVWFGRkmOfj95FY2QxkrrHQRHpFk3XVe3dY2S5NTL/DTxuWYAlkLULsLsdLDble8mDkmKtZdZe\nT4I5VNCdgMtOHOS+VRXbzC/qKLfhibX+DvXDa25Ox0XkWjPb/fGW63uW0wwDhl70Z0TxJCnxWlIW\nHUTDU+LImeTpUHQxK/F1C+GdYgjeBdrVlril/0R0eyKN/RCxOos5dwwlQKhDGGZQpYQV4NoFJtAc\nXNIzlCo1K1EteWBW9y/bFA+o6yRYzAaul2O3+n0X3YoBpXcCLR+7vNxaQsfip/x+ygm
3arXBtVpO\nNg2QzHp6SbMN3RIswWFm/ODqhzHLiqB5xKsf/nPVUD4R9I3ldMJ+m//KQYRXnk+bTSmdIQqhaj4l\nJBrePEVyBA14Leo2xgp1JUiJSYeR4g5Uhhm79e8ZveEoKA2dg4StObljCJMEcYhlw+gd/PVrJjhu\nn0nMKpwZCYeYoeSMv9bepZr1XqekJAIqPk/ZLeF63qCCVfVkU0gGSP494kim+RhC3RGltZF04DxY\npuVTUdRirkfsGlFpo+pAShqSSNrEzGN0CBboXSgb+Pxo/SDVaLzuw7/ibQdtimhZyzpncr85UEsx\nbegby+mEPAfRDtWyc4k/PYxmuIdGbFAFKJsTeVo8jV083cmlV6NdVKg4imqYygvOBG/5gm+7wJnp\nDfi7zqS14jvQ+B+aqhv8Znyi8AhijiSBynUQ2tCEl546yhe+3+R5zxhBxJPwiBitOEo1Nkpql+TJ\npAMShgPnkFAgmr1DxVO2O6hpNn6WeuvJLE25O9SRSJbJb8HQVJFMet6iWgRN4ASx+hiLZ7uRMQAu\n+cAg7XFjUFeBb1O5CufKfCqkwfJUIT7rcW+o3FvXo3S19z0sD8HQtoDDfAOVnBY0C/UR3TjQN5bT\nCLOS9hVDNBa+jo42MCsom+O02kN0s+HTyTyUiaod0TmGOwUpjCMyTpEciRZtGcJLpFnBW0eu5I5n\nf4JirzEi8+rC9DpuN0eQBeAMb4IRqSR7j6/abpRPHR75uwsfJiEc9Kw2L9hkgk4xDz88wrxGxQ8+\nMJJlW8VnwwdUY6vAF2jZAYwwOMiQjXHpB4ZR5/EondFVaNlmMK0CckG8d4KKZ0TGwHsu/uAQOEdV\nRgyHOT/lOZR55W56wNMeH+d1p4xx5Sc3YyIM4ixgVrDrVg/SRIm6imGfsBmsmHg0dNUxuxUFbb+Y\nC94/HweIxVp9R/Llw1yKvj45PP0DDTMIoUHrgMiKa99ISyoqmUdAiCFPv6TrVdr0XF4+eZJTvAEY\nhebb2WtitJFo6SSpMcJkaHHQ7bvwmts/wqXzTubcO3bhl+86BzNyEmStPMRsRRQIJhiOpg4Cjkk3\nxkhcyq477c6/vDvREU+LYQKR8/97ksP3GOKS64wDThnlhx8c5k++eD9nvXtzblupnHAmbBJWct84\nXPnRhQC86lRoEvnhiYPsd8oYr3xu4NRjFmAIK3B86eKHuemuDme9a3PO/qny1j0HyN4q/OBji3nd\nyffyvY9uwX4fX85VH5lPNwm036cSV5w0DMCPbm5z5QdafPPnYzywrOLn/6OccFDJZy5usf3Ivdyd\nngGWPeANWQLVrSzYdYu7mSc7gSlCQFQwl7LnLBuPv9XPhk83bCXoOBM/3goVj1giWAuhRKbJSE5F\nEkAiQYWxphIUqAYYTpOUbggnyu8GtmKn9gMcfs/z+cJbLuXhyZW8eGBzBI+ZmxOlcrXpqJM2Ocwg\nJDAlOqFtgW/+aAXH7zu/5unsyrXBSgLzUM64ZDnvO3QRt69MbDM/0OxF3IxxPGOVcv6VK/nLg+bX\nREdWkwxDB6GZ/Ub+8bKVXPTLgh+cOFSPSHAYd08KQwOOBdAbw3Ic1900yb7PbALChdeMcfjuAxiO\nish7/3kpn/vzrXnViXdx1ambMqotXn/6ODLRxgazqNyGNJjOOd756pUc+oLN8vGQnMn3KlSuomDD\nesHTgXXNhveN5TQikmVa88z2CtJVr0Yta/B0sb4G87GcP5WEqFKYw3QQK8ZwqcFpo89mmevwn2PP\nIamSvENSk0oe4IZjrpxTQZiqpmzIddAJ0YC5CrMCZxXmCqgNaa18VEcVu/w4OSNudcqGukqQ2iCe\netEyTnrjImDNY51fu5rtyHprtZ6cgtRaNFM/c/UYsjGewDNQf54Vz/M1EFIkeofDoQbf+fUon/uu\nMTT2EJ3BzYCZMZR53x65nakdUT94/yDR5fhuoYHKVVmUxPycmJU8FvqlQxsAgQDpBsTdj8h8qi0+\nhiNldUeTJ2Eorc7PPvJ0RV/3SdsQ2CBLR7ZAkueFKw7jR6OBFw60aDc9ND0NM971wiO46A1nZTE1\nW2MjvW1NfT9bUGBZ+8XACKirgICrPetsHF1toLpsOF2z2E3u0Pt0tfcZcDhOeuMmOfFT12Pm1911\n5V9avd68nUg2flJvKyB17p3elnM14iD0tp9NqPXifB7H0gm4cyzxhfOWcsAOK6gGu6QgM2OFphrK\ntFYEUlUxy6J2RSwozFO5aorfbrPuWnmq0DeW04TsWyj4XbHyJqofvoL23Zcx2X4+lSQKdWCGyXok\nVFwkMoCrdXwATBKFGiE1Sa6BhnEmG5FFnTv5oweO4sqtr+GOxg58fNlOaDLElI4X/vHXZ7O0WsHL\nzz+2JstVJliVezCqlNUTbTZ2ZDik1g4SKoQCI9EmgQz0/Dnwq2/inhno0rrRM2bZXGWSky6k/ny1\n6eiG9LX+bfYypRbqystPKXbH9X6fCUr8Guuu2TYpMCQp+IBoxWaDE/zJ55ax1/MCb9h/Cw597gN5\nyRny2LoPSFFwdDADtXGCTRB0jCtPHOKwU+6j8hFEMIOQChw+D3KOe5brin6CZ5rg6v+3taIodqV4\nxU/Qn36W4H9BUUH0hkuKJcOewFE3PE4jwiQquWvCmVB6obQ6oa0NVEoKOuy59BCOGbmZ/yw3pVk4\nBmJFRwpKJzSSMimBEy7/OD9445lEMUomaMVhJv0EvigIyVH6rEIpszHxI0zpGhFatXGcOkw35f9T\nflZj7eXDI5Zbe30ypfrykcNZi+loym+mrq/7uuvr4rNHpg6cGW8/qMW/XXYPV980jIUtZzSOLAhq\nDnW53MxchxQHaYWKMizk4Whc+qFtess3pNh4UuBT8LiepYi0ROS/ReQGEfmNiJxcf769iPxcRG4R\nkXNFpFF/3qzf31J/v91TuwuzCy1XAPMZ+9EhLN7zE7lwN9TTr+Cx8MSeT0LCCHhLIBFIJB8Rg4kw\nxLh3DOgY4ofopBE6jYKLR7fmlN9vlr1Jq3CiOMtTy3kpMmDKIef/NVEmKWwQMWMotWjjQYxGzEmf\nNNsM5Vpwa3iM647HW1Ye5fWTORSPNr4KoZKIEWjLAAe/aIRLPrwbKh608yS2th4wcESaWmszqULR\noiPzGNBV/Mlnl87seGYp1mUa3gH2M7Ndgd2AV4vIS4HTgc+b2U7AcuD4evnjgeX155+vl9uo4IDm\nAb/j4R+/D2cVSRq5tk/Wb4obUmDCtyhdgQCVb6I4Ais59PcvZY9lR7DCD/Hau5/PYKdD1QAbgEYE\n55sgFd7aJOeZbASSCMlP8kA5yo0rbyaFNuYdjQqSc1iYOnntY7rRxihjhRG4e2X2W5sC944a0XnM\nNXPH0AxCcFTOSAIWHSYrEJSYmlz4gc1ndjCzFI9rLC1jrH5b1P8M2A+4oP78q8Dr69eH1e+pv99f\nNiK+LKXM3cRSsPBV34Ydz6CSVwIgzuPliU
c+oo+IdWhVFdEVDHU6rPILSW5TnuXu48ebXsB7bt2E\n0cFtGQ8FyYxKBA1tNCacQiUDgDJYJhqa9bZ/v/IOdlmwBWM2iRJJhaOqL4m4sUTtNwBaCC1JKCXb\njCgkjw/wtn9Yia8p5/yM3jGKSU5XeVOCCoMTgar9EJJA5oqw3VOMdUrwiIgXkeuBB4DLgVuBFWY9\nN+luYKv69VbAXQD19yuBxdM56NkMJcd8JE0Ql/4LnZv+gdbWLwEMWY95rVcHVtEyw1NQFU0m3DCt\nAobLldyWtgeDEJtUMonXgEcZLJt4NZo+YhSIRJzBRCgoXZMkwnuu/BwH/8Nf8IUffw0IBIOW5SRS\nT3C2f588CTzawcvlRFfc3kI0UIlx8GkdDjp9jHJyZW6VnGFY7cskSUQHVWOIcsCxxdByzj9pCN/z\ndTbui2GdjKWZJTPbDdga2AN49pPdsIj8uYhcIyLXPPjgg092dbMGwQLeGhQsorHo7YRn/QWrfvf3\n5LxzIuoje8PzJZhQMWRtGreGYtIkESglt0z6eU32vWtvXvzgIaxqNdj7gYO5cWhznCVKD2YNOiFh\n1mQi5Km7VyGoUfmSQjuIOZxFVm0ywcX3/4wJVlAKJFx9OzvUqtmX4JlDmFqa1fXUFSNhvHKnDsEc\nNy81NN5LoYFWawmaQ/8bBM3oaCSPSpsz3jLM6Sc8i9efej+pd8lu3BfDEyodMrMVwA+BlwELRHpz\nyq2Be+rX9wDbANTfzwcefpR1/bOZ7W5muy9ZsmQ9hz/7YAiTIpnxxxkD276F0Gwi1sDjCc71nuQm\naUqWNXelmKvWaIeMHTDpoALOVZxw13ZcOr4d5211LSOrbsVJBAqa0qQVjYIu3Vj2GJox1w4GhYki\nd/pE8UAgtRxM5krBAQZoaF2dKKnumCmY6mCuXZa5cfsZ64LcYZRZ6dtAhaPCUoeguYB958WBiclJ\nTNpEoKUVyWbWYK7O6OcHbNOEj515LX/2xVF+8OHN8b5/pmHdsuFLRGRB/XoAOAD4LdloHl4vdhzw\n7fr1JfV76u+vtNnQJjRDqKTDYBImnSAqdH74fBq7fghvJaJ14Yh1a/MEJMeLVOj1iCRKvFOcRrSo\nSBaovHH25Av5yma/5vzbSo6854WsXLBDXUQsRBPUNQgp32hTD7gA0UEjBbyGulZQ8R2j04pYmuCl\n57+JL//uIpLAP/7uYjQZFdrlgZjSEbP6/cbtZzweDJEAYiRR1FpAgeIJzuPF0XEO8UZzwVYkbRCA\nyCDOjW6A0RqdkOtDJyVSBM+APdTdkxkfz2zEuniWWwA/FJFfAb8ALjez7wDvB94rIreQY5JfqZf/\nCrC4/vy9wInTP+zZCqNBC3MwEBuYN5r7LKW15Hg0htogkqt/UVTrFriaGzH6RLCIl9wpUjrjZ/5Z\npDDEd9s78JXRLdhz2SH8fnAHRJSAAgXB2r0RSE2YJWuM6tHhTGmJp1oRaCB85jtf5ucPXc87nvVG\nvEAha9YKrm0gN5on4BOEUZMCG0QUR8qzBqUXZvnIefexdDxx9e0TMP4wkiqis7rjq7kBxmw4c4gr\naeoAF31kN5YuH8VTzvhYZiv6veHTgSl8seos6+yYgSiijvuu3p/5O72Z4jfvwKvRsay3HHztXXYz\n0E5zl44KogFHJAWlocIfPXQERbtNe3CY+eUy5i2/k7s3ez6NFOvO3id2HpNkqjNcohEFE2VCGxy8\n1Sv4+F7vxLvVFF1Ti6rX9lj7eCSyamZ9tMyTZHXLK6liIhQMYUBk/08sx4oFJFcSkpGcw1tippnH\nhQ5RrG4EbfKdvxlkxHfILKJP796Vfm/4TKJXuRxxlkP4+cPEuIf5L7+CsOnbMQLJe5wzCt9ti8vk\nCmIQkmB1mUZsFOy28jBefNee7Ln01QymcebbQwzoSlYVIzy0yY7MK7NH6eSJ1246E6JTmpVDJZca\nvWHrvbnx/pup/CQdm+D3VQ5Dr/1A7U/BHxuCB3OYeqIYCSVKxDGJC8YQHQ44+TZuGfeYj6hEfCpI\nzlOkIovbzSAFeQ79NHHWooieztidvOvMW7DYqJ+OG96hmg3oG8tphEnIXp60EUmYVAwptBhFDLwM\nkmKojU+XzyYhWmR52sYiROcTymH2vOMwjpWbaTU2oRMGUQfLWlvhU6CwSJQCQ/C6FiHG442x/ivk\njjsaglkgWIN/v+9HnH3kx/nt6B185r/O4RlhS8pY1S2Gqwk9+vfOOsBAXSSgFAaFGmaOUjwQKYst\n+Kdzbye0x/Fq+FxzhqOqWw5nbqiJmGlD1EgaOHL3xXzxhJ0AQbXsxyxr9I3ltCKRyVCbROnkKY0l\nnMzDywS2z52UNo+pBGBOAyYR0cDnli7ibx/YksmBAY7d/AYuLp+dqci0JJrPTEEutzoKRse1yBpd\n695rs+Zl7xg1R+UgSUm0kn3PP5a/uvRjfHDPP+dff3ER3ntUldSVSFibv6yPR8IUXEXCEanDMjTo\nuAbLzfOeM39LKwjXPbgIHdwKJJLcJE2NVC4x3ogzdmyVzJblFJSK1Cw56lUL+fy3V2QdJz9YE9n1\n8fQORswwpDZa+cJqkoDgHaaQbJDC30YzrMTRxMh93h1X4mnwkmWv4RcLvsvuKw7noLvbtKUFoU0j\nKSIFKtBMFaVv4Uyz6+HGECtqOrEnjlICA6liwkoKcTjn8VZQYbz0rDfx0z/9N5brOIvcME6EyhIF\nmVk9meI3IpbsJwRxJJSicmgRMWkgqjQERjBufmAe2kigrdq9L/A2yURo0lRHKzJjcY4upZ06aKYG\nz110H0d/8iH+49SdMIk92pG+b9n3LJ8SCIGAJ9SGBZvEuQcx6yDRKEOHMuSMd3BgEtlm/DbMDSII\nu9gtFNLGyzilN5pJa6ahgDOgS2hrzfU2lABNzdN5H4bwUuBqVcGUEr4V2ev8N7FQBnjl/3sroHhZ\n7V+kjaeD9YnDFE9AQ9Wj7ivJceJXfeguJt3mmDj+5nXLgczFiTRxVI+52qcCXXYkRalCmxvv3ZQL\nP7wjA5rqh7/Uaph99I3lUw0RxA/gWcrEZbuxouMJ5RCuiqzyQyTms8cDr+fhKLx02WuZ1MTv3DZE\nhlARnAWSKzG0p7Y3nZeuI1JoThAlKhTFOaEKyguGd8FZ4Cdv+RqmghiUEokYDZGe3HX/VloL4mjr\nKpwEnAbG1KGm7HN6h6KxEHUrMIPTLizpTu5MHIVuuImeGLxul5IqjrLJYCI6j7euZnr/HEPfWM4Y\nxtmFoYM6LDx4GfaqlahuxnjYgeuHduRAvYHJeTsy3FnJAp1kVTEf8Sug2pzKRxKtngcAT90MLUgD\n192OOa5ddQMvuuAwzrz5PEwMVaNQj5c8DZ8O+rKnH7JZacgQKy17bCf83Y2cevFSvCVcUeD9Jpk+\neHDTDTtUgGRYVYHBt65tsP0SrSVua6XKurOsj36d5YyhV4p50zuRnd9Oe9XN7PuDc5GqogrgE0y6\nJ
s1kVH6S0g+zoFyJSiDRWp2JniFEWnibJFnFYGqgTfjqG7/Im896F1cffx4iWZfGzAguyyv0b6nV\niJbwVGgZePVnVpJkONdSWsJrwOo44YbE1Hv/+x9oMCGeAkdh4ETxUXp0ffD0fSj26yxnGQQYR7Gd\nPk2bnfnfiXm8vniIMgViMsxaDESHiWNhB4rYpJJhSg9Ie8YvVG9tcEpLNFO9mefK3/0XP3v7NzCX\nsqFUpXC+Jwm2saJrcszyw0NVCeIx9bhGIskATV9lDSYdJLkNbCjX9o8MjvvcLbTU8b1r78cJeHPE\nkLuQujJwGzv6xnIGkIP8HYZwONeikAHO+OHZXPSAR2wMBBwl0Vc0bJLRhkPCUpxFglY5kWNFZtGe\nARgCTokMIHgOeMZrcKniSzd+DUlGSJ5SEuIcCUO7sdQNP0l56vAY+zbVkGjK7aZlNF7zyXt5zSnL\n+fgxDdrqQEqSqyji6pXZhogGCmsoN77nj4doTyzndadO8qxtN8Op1soXeRlvM1ghP4vRN5YzgNzU\n2KRCsVThUP7tzedw1dv+nao9wiCBmHKmOxHwprTKBoUqqktQcYhUOJsZrkOp2/SCRSZdg+/d9UOq\n0MbjuWL5L3nJN45m33OO4n57EDOjE7u96YZ2b6wuW9GUG21O2lLL2eyOTMkJ65ovSpsgWUUbcPUd\n9eGv3865H94WpMlJXysxU7w1EIwqdNtIH12C9ilFnUTqtbJaYtmyB1juns/3/lZ47qIcMDIU61Y/\niDBV2G1jRd9YziAKQFyBWEBMWRUiWibaCKmltdStw1mDKA0mg89EwsQZ90C6/Eeh64FoFnT98BWf\nJSXFCsfiOJ+9vvmmmsot0VX+MyBKllAVha6Jl7nG61a3nkov7RUxEpXrGk5HZRXg+L9n3UFTjf1P\nn+R9X72Ng/fejMNPGaeiQfDgauOUNcK7651BQ2lgJkRfZZJf6+DN0V51F1QBF1cRXQsRIWmq99nV\nwmlCvyS7byxnDPm5nJsGhZLkhFvLe2kWFc0oFBUESySXqOqzIpaZgcTcBkmfPFK5MA+s0czm78Lb\nvs/33nQm+339OBKeslMhkr2SBJQiVF5wplRmIJmNe87A5fKomkOISC42d7WEbrRxggXUlM8fvxMS\nHSlWXH/3pnz8GwapQ1emUafEKJzIjB8HldXttSaKs0i0CYoF2/G1nwmfe8dims560/PZkPidbegb\nyxlCpnoVkgRe9M3j6NDmzPP+kW8e92WuOPpcGqMDlAGCCk7zTRrM0Hr6pzPJrPCYMJI4gjjOuPZL\njDBA1ZrgpB98lr2+cxz/+fD1JBwNFRyaFbcFgkSgws8xGTQx8BYJlqfMHqnrDyHYAPfbJA0dYN/T\n7+Y+MYpWC5GKdjWJNVo9o7N2t5O3mTwONakzFU4dTj3qB5DCI2pc8rcL2HXTzFkgIr1/fayJvrGc\nYTjgF286h6YOcvbRX2RJYyEHfelIynmZUKHtsrBD0IThKTTfnG4mmRUeE/km6ugkjdBkj/OOYXh8\nHttusTXtMvLiTXbEoXx/xbVctew6GhaJgFhurCMppLnhXRrQroW8qiR5Vm6eKMrl9/6C+2Ulm9Fi\nv0+PE6Xi5POvotJEkAFaFI+3+hmEZC0nqfkFpEJcJKUmBz9nFDWoCDm2LoL3c+uBNlPoByJmEDXl\nZU2plm+6wiqIimcQv6JDXOypSHjN8hPOFJ1lPdgGeGlieBpElg2t4Bu/+joy4TjgrHcwUUzyk2PP\nyTGvZAQvmctRjOiEMMv2Zypyp3SGAC2rp68+MzwlKamsohpo8tqLjmfe0ki5c+DZfh9+vfJOWun5\nJGmhCy6n1C1oTfzRGpnnDYXkEqEqKBsRSy28VaCRd75+U669TXjhdkrhZ+95mQ3oH50ZRDcHmmgi\nQGHQsiZX/eUFJFOaw4O0yzbRCTZR4izljKTNPmr/Vkw0UwRTCpQkMDLUZOVIxQ/f8hVEApXAPl/+\nU2KKVJKIFgjraDDWK2LWlWzttYWuuRZb6y9MSWzXcLCGZnclmZ9U0zh//p2PcNjZJ/Dr6lY+fNGn\n+N4bzmH5IiUl43q7Cm3eRrXp5zPV2sqDaU7smomA62nt48YB10NT/tHwCNE7crSy3RonlE1So6Ty\nHiYexHC8fJvE0Oy6vGYl+p7lTMGgR0ZhfnWtmwScCK2VEStLwmYNRBxusKCquylMutnp2QFviU4A\np0YSIfpG1m3xEZ8qDvjm20kuoUsdh+52ID5FfrbiNyxqLuDZw9uiUuBZ7cVpXUTjzHIvPY9eBK1E\nLNOT8KhmTqiz8nmJzB8fEUKvTMeIGJ4KoUFW3HR4IgnM4UUyEZDksqcgHsVwvsEvx25m62ohf3HB\nh2gMF7z6nLcgwZG8x6rIpA8EG6FsrqK99V8xdMfZSC0CZ3XZVzcuCHm9gtaEFVrLjaRMy+fW/3yb\nq0AdOEXUYyhIREjERhunBc51uOKUranMkIYnWqTR950eE/2jM1MQaq5L36OFBOuVlJxy+Ac54iWH\nstnSQBCYFEN9JBHXyKTOBqj4XBPqXC4xMcV5T6MyCivoBMXjGFxk7LnTbrzkX9/CHotfyKKREXY9\n4/WUNonqJBYnMXJMUCyztUPt2aU1jaFaLp/KZfCpV7qjAKqgEaxLY9utPlBymqn7LpIIWfEDI9Xa\nR0Ztp3I6imUyzldv/HcEx7UrfoPh2eOCYzhm51dz77x7SNZi0ntCE7xvZE58FQb8CM3lb8bCA5jA\n+PZHkcIk0Rm9R0NtKOnS7CG5ZEyqLD2Bz8utJ6mGUddu4UAFdRUiCfAU2sBJQdF+mEapREt87T/H\nEDMafb/pcdE3lhsA0mMer0tqgBcu3onj9ngD/+fod0OpDK0wBiZ9Lh+ahZnJJI6Oy0Yoollyt7bp\ngx2HiieJ8NEfs07eWgAAIABJREFUngabFOx94bEcesFfs2lrgDvG7yM64bXfeDcrWEGsva0idb3t\nbIgBsCo/XERw5nPLoHkSDrNMcFY5n5eXHBdUssnJRBBaNwUYgicQcShFqnAW8VQki0TnMcum+D+X\nXcslN/4Hf/mtk3nR/Ofw8ouOo7DAv955KT846pv4IjEQFaeri6ua3hPTKCu3fS/tLT5G6GwFroMW\ndxG3PBF1eR+6RfpGxEmFSUmWoPWYWP7rEqyHVAi1BIRKQiWSQgenTZIAKGqBo/eYJBXDfO+D8zEZ\n4Ji9hkGUJLPrgTwb0SfS2MCY2sVRVQkJiSCBd136CX42eiN0p23qwc2dLHJQw0gE9Yw1EhFjJA6S\nbBR8E1fBj4/+f7zqa+/mirf+HYd86Z2c/46/Y1WnYsvGYpzkh8kkMGCCScSZw2ozKAJlPbFOJjQl\nTokJFzU3pNRLWK/CNR/xAFZm/XYDlcTLzj+WjjoGU6TTgJEVg0wWE0jTiJIfBMGapNAmoTSS4Qjo\nH3iQCRXmJrG0hIFlR1Ft8iVat56b90EMRxt0PrgJsKKug6
zyBMSK9WvGrsMHKmVvXyH3eSuOAS+c\n/tYWz9w8cewnb+ZrH9qFYBCogPXc5tMAfSKNOYPuFaqEAi694ydUJrx4h+cBCQuKSpozhhLqPRKj\nEwrKoDg1BtVhrk1wDTQqdAIvueitHPqsPQniefXuezOSBnjDhe/gjBvOIYu+ZU55QdjvX/6Uu9My\nINKpY3sNSxREmpLQOpJZ4XG5WImHrV0XyKd6Ai+0MwUIpThecu5hvPjCY9jr/KP4yRH/ynVH/Cud\n0CBEx9j8cTQEPIrHKADnJ3Dqc4xRmkT3h6yLELWJ2Xxwo5QLzkPa25MWnYsW4yS/EtVmNmpmmLRr\nUuccolFX8ciY7LoeeHA6UNOqeVQdScAn4ZCXGd/5yTj/u8zzD+9/Lg2ljj8IJjNPPDzX0DeWGxiy\nxmvjkO32piPK56/+Kp6ApYCb0QLmJ4/SCx3vaaZs6IqacTu6bLCQkomRPDG+6PffJZnyjdu+w+4X\nvIHClxy84568+Bt/zB7nHkK0iiO/+W7MRTYPi7hHJmmirCKBZBGwq5ddV6duPEWtrGnAAmlwR7WK\na0ZvA8v9R00K3vfvJ7PXVw8nMIgfN2Iq2PvfjmGPC44maJuY828kb6gISRzJZSGQ6JShylP5iiI9\n+u1jJLxTIGI2TPQRWr8nDf+U0W1PIG1xBjCIGogYRrNuOvCAx1nB+tyaoh40h27MCswEcaCVkQrh\ngh8b7z58hHd+eYx5oZMF1UxBDJlVdaGzE/1p+CxATvjUT3iyLEslxgFnHctYM6HtitBqYCF2Z1qz\nG5ZTL1G6E2HN02dRMF9Pqclia7XSZXuswfBwhViHZMJug8/nhvH/JYnPTXqSuPCos9iReRx45p/x\nwIKH8CGQTPn2IX+HU2XToS0565df58/+6DgeZoJ3ffX9fPG4T3P4F49jYvPIic97G5/97dnIKsMG\n21SjQ/j5QDKUKme+XUKiIE7qdE/+Z6J1HFUpEjSipwx/6N4xTHICxyQzjntzGBHSAoplf4Lv7Ix0\nFqJ1V1M2VoZKhZMnfpYNq2WYc4Iox3kTokY0oykR66zgvI9uz5BPYJGmhrocwZF8VgjfGLGu0/C+\nsZxFSOSyHHCskpJ9z34TYaBBShUijqLjqJqKmsPNmvbHP4REEun1k4uBidGKDsUoQ26FDJa10sUp\ngYrK8rQ3yny8dSgrB82Ex0jmaK1wVAsrnh125Jd3/RoZ8JSV8qmD383mrfm856rP4XGYM8SyWByA\nSp6apypRFC3+4cCP8s7LP5qVNSlywTmGSMKZ0fGBYAmxrKsugDMjikdFEYygnkYySv+HjIxHRRBL\nCCn/ziIBz8CtF1Mt+XvC8iNw5WKcdACfS4bMsbZwuNWZ/ceGgiVUsoidpooBWcnm81Zxz4rNuOzD\ni0maCBIRbRK94VVAEiJ+bjyInwL0jeWcRC5lcXVxclsEs5JXnHdsTlREhxUJkqcpkdIZUQKFbvhz\nuDZUNLfWmceb0PXTDOgmzoPWVQFqyFrxP7ExTIZ77ytSJhVxhliBAFWq8C6QiBTSACASCZoz9N45\nMJ812dc2NKKIC1jS7PFZwJzW5Tv1InWsM5gSnWRCE8vspPaY3KIOkxKxgLoSpw2UAiHmuYO2CO1t\niQO3MXTbF0kDd1C0nwE2nKf/VHhCXWaUC21NKgSPSgdnA4/cpCVMPEkrxOXSLqpJzMZ535Gbs+WW\nBT+9bhXv2nchSScQBvOxnFLBuzEaSugbyzmHbvqmVyWoUDmtScEihQVe/s2jEfE0tKQTHH6NfvHZ\n4xfkabfrGUVB6ux49/vViM4IKnUsQuBxi7HX3E+jq0LIGgXpsLrgfE3kqfQfgor1NGd0reF0jefj\nH+fuNjyrz+zae5EQa+GlZOj2fwKbR6LK03GbQAhobSCNAnFtnLZq//IxjpFWdWepx6QgSIfvnzjI\nLSscOy909bZrI6mr7fHGnL3oZ8PnELq3v/ReO0QEh9DEM5Aa/Nc911E4w4lizjDJJS+rMTsMJUBX\nnjcoFJrLiGD1Pk5FqOV363nuOqx9bdq4KcZwLW/vkYYSHi/LvNpQ2iOGY3Wi6vHR3cYfrmAI5ZYA\nVJIY3e5dVIM3EDf9J8xFcEJqPIxLLcQczjpAQqWCx2EtV1fkpJoIXkc5Zr/sp263YPW4u6Vq0j0h\nfSuwTugfplmCqby42dh4vNUErC6w85JtWNQeJpCIEhArASE5RR7jpuxj/fBUKxpWzYdQP0kjjaAu\nMrnFpzAc1dCPcm95tRjzJSKx5gdoZPJk13nU9VntuYekmHpKgze9MnLUSxbypcsffPRQzex5vs4J\n9I3lLED3ms086as9BxGhYxUmkS0am3LusV+iHPPElAg6iK9yIbP1PKj+1T+noAOUfpLmxLPJKZwS\na/2Oya0+gjbuqXu8m4h5RJuoFjhrPOqqzGV/8eIPDuFFuOykIb7xkxwDfceBm9LG1W2hGz7sNlfR\nN5azBN0JXncKm8N4jqY0a/MZWBGXw/yKS9/8FS486u9BPGmimBLBWv1K5xIj+UaGXNIDIm0EpT14\nM2YF1cjPKef9CLEGUm1JXPx1Vj7ziFyobop3FVFbj7rOzJaUOPT0SYbi7ZQY3z9piAIlYLQkYs7B\nHyyk7+Px0DeWswhTEyBd7ksFAgUiwjZ+Cf/1xvMY64wxnyH23+Jl+EFH9ShTM2cepw2sbzRnHcQ8\n4jpksosmQrV6UmAFqVgKKOXIj/FaUG75cbxTzAqCm0T9w4CSpFxjvU4c3/2/Lc75yC787PclCerO\neAMXHist1Mc6oG8sZzFc/a8bzyy9Y7/zj2XnxtaYeK566GeElJNAYt12vzqZUpet+NTvzJh9MEyH\nQXJMEkBsdYrP/DhjO/wp6sdIAu2B32PWIBUPYRKhWgI4SIr6NqEm4LzixEG8MwY0sfc2gUY9J8lC\ncY9MWPXxxNA3lrMIUzPiUz/zQEBpJOUnR56H4BkahwMX7Mu3j/xnnhueQzFQczZqmbtQzMAC6mJN\nBdbH7EDu1BJpI1pg5nPxvETEqAvPDXGrkFr3p1kuoBz+GWnwetLgL6l8IqknqOFSg9hw+Ik7OOen\nE3gRbnrYEXzu5AlYbS2lH9J+kujXWc4J5IrBnAztQExIo8mDxRhD1qRJwcsuPArvhNhJpGAUKRB9\npIjNbDD7mEMwkBIsgJtAqm0IVZOy+TCb3vL3rPLDDEZjkdzMfbIDauNccdISgmWKO1fT200VIKs/\ngFlI97eh0a+zfBrBTMksjYY1C2xokFR4NoktIglB8Cs97939BGJhFKnAtJ17k63PJjMnoU1wbdSG\naK54FZ3m/Yzc9w4e2v6vGNZxgkywTLajqJbz7oMG8UwiDpxbLZvsnFtT96dvKJ8U+sZy1sOyzETd\nOeJweWpuQKnMY4i700p+evw3ufO+OxETylDhnNBURZxmpp8+5hAEXJVbJKXDxJKv42iwcot/pPLC\ng9t9lPGBh4lOO
HrvRRyy+wiuGsKo2d8fp3C9j/XDOhtLEfEicp2IfKd+v72I/FxEbhGRc0Vyc66I\nNOv3t9Tfb/fUDH1jQbfFQvFoTcqQabVscIAksI0bIppywkuOJnaM1v2Kl4A159G2Am+PVlzUx+zD\naiPntIUzQ3Ue1KxLRsI5x1b3/DUTm36JZc94K2dd92tC2SVvcrjoHulR9jEteCKe5f8Bfjvl/enA\n581sJ2A5cHz9+fHA8vrzz9fL9bFeyES1GVLH6aXOjmd1GW9gqiRzDFVw/THf4sfv+FbuiptYRcPp\nGoH9/i00W7Fmg3aWefC1XpDDrINnAGEVd2zzYd70gh255siL+PF7X0jVGMcVEVHFNfpn+KnCOhlL\nEdkaeC3w5fq9APsBF9SLfBV4ff36sPo99ff7S/8xt55YzQaZSSLI3oMZrqZUEFFcaNKkwhUtbol3\nE7zwkyPOo0PB1Yef+4i1Jq2VBlEMrVmOuiY4oWudrX7JyfQjOlnrOAuFJiqnNYdnppTzeEwVYRij\ng04mWHov79v1L7l+8oYslCZDmTzDe2ya5HT7eCTW1bP8AvC3rJ4nLAZW2OozczewVf16K+AugPr7\nlfXya0BE/lxErhGRax588MH1HP5GgPoQR+lWXcLa7Afy/7d37lF2VfUd//z2PufeO5NJZvImZigg\nVQtaikgtotWq2FWoK1INAUFBWopV12otbS2pbVltlbW0a7nsC0UKIhUwJFhBqkUEtNXKSx4lZhII\nFggEGJIYJsnc19n71z/2vnfuTAIOr7l3kv1Za9bdd58z5/7Oved8z3789u+HYE0J7zyvzpZTqGCM\n4e7T13D8NasAaBTBgVlRrLF4r6FFWg/RwFvZJr0YvBj6OuaFpopn4sVRmJD2N6aEj2v7Y/xPk2M1\njDJ7l2HZiRZlcuOw0uBvT/gLfvDx/6DA84Z5R9PK5ujjUI1IytL4cvFzxVJE3g2MquqPX8oPVtUv\nqeqxqnrs4sWLX8pD719IhkHIWj9VO6Vuiw7RNIJ3nlyCy0gDx22nreNXykeS1/q45IQLaLg6knuc\naWBNBfpCfEkkpLXNHfQ1PdXky/6y0YrC5I2j5IsQqJecepah3uFsDcThbRiMzPJx1p78eW5YdQXH\nLT+SAeaQxYm+XEKiMTvpYZp4OZjOt/tmYIWIPAx8jdD9/gdgSCYeY8PA47H8OHAwQNw+CGx/CW1O\nPAsigs1s27+uLIadOsY/rvgEOqCc/d2/oWwrvModQmZzKBeUAaUc8mkTUtwaVQoTR0tTF/wlR/E4\ngZKD8axM01gybWK9YtRgvJB5IfeKLw9QmH7O/upfskTnkTuJKUgCaYRr5vi5Yqmqq1V1WFUPBU4D\nblHVM4BbgZVxt7OA62L5+vieuP0W7QXP9wMAVW0LpVelWsB8mYehzP+suorTh9/Fd079Mg9v24p1\nBdpsgDqsdyGqeebxotSzeKNOJ85t4nkjGDIPTix9RQNRqNsynhpeLHmjn6JaYbxe5kfvvZK7Vl7N\njWdfhIowzwzFpZGJmebFtNv/HDhPRDYTxiQvjfWXAgtj/XnA+S/OxMR0aa3WEBGMCBUbUqpmxoJk\n/OHxH+Yd15zBVz/099y08irEG1xWoqF1rDYoNZSK8xQiFGKwyV3vJUcJrfaSg7JzeKP0F020sZuy\nqyB+N4jn8P6DWXPqhRgsDk8mFhEXf9/U3e4Gz2s0WFW/B3wvln8KvHEf+9SAU14C2xIvgFbLEsJc\nalMMuW8gYtnT2MM9p1yLSshjU1AwWJ9PXqmxu7YHU81pzFP6C0dDMrz4NBP+EhNSbEDTGkSF3Dn2\nlBRfVzJfxteVZlZw1C8cxSGlg6lR0K9hADl6jiW6RJo628+YvLwtIwe8KWNVmZf3h+Aa2qBqaszb\nNcB3z7mMtY/ezEW3XM744B6sOuoKGI/VfV8eXgxm0iqRMLPrjJ1Sf+AQHixmUiCUQipYrcUVVy7E\npRQLJse4JtZ7arnD+BJ/97bz+eWlh7IkG6JJnTn04SWE59N40LpAeVpZHhMvB+lb30/pbBCaVmRM\nk4MRrMkZ0D5qC+DCOy5mxfDb+NZZl/A/772S1Ud9FCPSDvlGMxzJqMcZR3COj6G/ggcoIaKNThLK\nA6FBKh0n6WPXWMWFMGpSUHZVQMm9YxxD02SoGlxR4CrCrj2eT//qx1HTYPHcOSy1izEYBlwFbXpK\n5BidyF9U9iDexOU6iZkmRR3az/F0PhEnZ0Ycb+6mLx9gN1VKTaWWeXY1d/FP37+Cm3fciceDd1gR\nkBgCTgURxajSNNpOGGZ9iIx0QHUTFQpLCKUmgvETPqkmhsizMXWuF8GULI2iQdZQXF3RecKcgSFu\nPvGSuJ5bKbsSku39JTYBi0aHocRLSYo6dIDTegRO/oEn32ilrIJTT1n7cHlBhX5Ov+w86v3wZ8ec\nzadO+DOMzfBG2repxYc1y1hyL6CKFw1O0RI+ufuP35nBxBijRg3iYy70sEgbQxMxVSA4ode1ER4+\n1QyfOypL53LHyjWsfs0HUB0nw5KR4yzQ2LuFnkMSyi6Txiz3U57ztooNzHBzC04UYR5W63z/nKso\njCdTeMO1qyg9kVEcVJBrgSIoFqXAGaEgo0mDisvCwkxthnXMou1cQvsrClStoeQduToKkfDtqEcJ\n3e1iu8csDCFQpFHCi/LN0/+Jg+xCjv7q+7hiww2c9dqTg9ekGuo4+ryH0sRtudfv2Dvp4Q84klge\niMSbLRNLQwoyDS1Ha3K8CbPot+/cwN0r19IQwWid49eejsOS7TGUfYna0Di+ZCm7nHr0FczjrO2B\ncC+3ksuJF7w4asbQ55s0JMNZh2JYdcQ7+fq2/0Jo8N9nXk5dPHMYogDu+eD1qNbDUlUfUhrPcYbC\nFs/9mDkQvtweJY1ZHqAoilcHYrEhZRZ500EuNDF4QkoD25rM0YJxU+PXLzqT6869iM/cdCl3jq3H\n1mF8riNraCshQgw+q3t9nszAnT69z3n25tmzBxOfPPrbomnAiWOgEKqZkO002MEyztfRvODm936F\nPdRZzCCqOWWEmigWwWgY48zUIyoUxuGBks/SANkMksYsE8+JoljJsF5Aw2JHjRMLeVxOZ9QH53QB\nkYw+cu7+6Dc4OHsFF5z4h5w+fCIYodRUnAWpg6inpIo2J4tlK1RHEVOxtpKwwd4z5zopppzD0Jiy\nPRDSBbdCgLRmjWXSsScb0cp0KShFey6/fVxVRAhBRtq5NaHQJh3pFyFmalccfYWnIhWa1iK50pjn\ncL7KykNO4q73/jvzmMsyFnDOmtWAoyph8idkyNHQtRODClhsmC5Ld2VPkn6WAxQTc/qEAOw2SINM\nCFlZm2RiKAM0C+7Yfh/XPfj9EMdD4Y+v+is+fvwHsaMF31x1aTiegWMHjiLfbZGag2bRFkmvHsRj\noxCFtF2BqQ05mZBDiC5MITJPwMay0SiO4umURxWPSli6Gc7HR7sn9pF2KI
oJWuOsIYWNtAOWZFkF\nh2OiRSoUKLevuopxp+juBped/FnyJ+DEJcfjd1vWPX4jDQq8D8Mal6/6LGWx9BFntdWH1TiqNAll\njzyrb2ui+6Ru+IHMvnqj6qNIaCzbUCS4xyihS97EYAnCJl5QHKI5DVPlbWs/iDqPNIVmxeEdWJvR\n3K3YOQUiOUYL1JqQpTC244LwRVETh5IhFBh1FNJPydcpxMauq8PF7TARGzJMWAWRVfHRF7K1nwGK\nIEqAw4TcYBhEFKeCkQLBxrDLgroGlVKJposi7z3W5Pzdb/45n/r65/jW2f/MO6/9faoPV6nMr3D7\nOWuxaoOvJTky5Yt2rom1k0M67buDn5gpptsNT2KZeHbimGYoE2fQw5hgSIRmg4O6ZNQYx0g/Hniq\n+SR7KPjIV1YzWBrgujM/jwJvWPsBKkVBPbP4okkprhQSsbCnhPaN420FVHC2ivE5aBmkBhqTckWB\nczGzq8cEwcajonhnIAvyIz7H0KRhSxhfYNSgUiO40xusVxCoFMqukidXS45F1aNSRgQazlPeCW9/\n9bFc8I6P8e4vf4xPr/gj/nTNhXzvw1dgKOMEbAGSCeo0+PlQIKQ4d7OBNGaZePG0hBLaLVAljHGq\nhFjtTZPRELD0k3sPupuD8oM4Mhvmxt+7gi+cdgGK4TO3XcacXSV+cNo1eBVsXgZf4reXvpPzfvVc\ndKCBWgVVDI6Sy0N33ARxExlvt3DHXRVBcOoxsUvuULxAboBCQ/ddGnhyKkUNLxnOQMilDWV1ZKZB\nRoNq7jhj2ckUtsDthOvfdwnz3SAN9aw9+ULGBzzf2vRDfv1LZ3PTh77EGxccza0fuQpnyuwxUIjg\nLCFKUx6d0JOjyX5HEsvE8yIM51mMF6wWZKFxRubAG6GipRizGyoIK676Yzw5f3LcWdz6uxfzQP0J\n7jxlDVpVfnjGFXxzy4/QBoyPZQw0lnP7qet4dfnV3HLqGrxXxHvA8um3rua3f+k9OFUym1NIQVMa\nhFaiR9RinITUwKbC3avWYJxj99g2tOGoFDVKT9YxY0qB8oV3fJamlvnBKd+gEGHt+m/zo/ddTTbP\ns3nHo/zn+y7iO6d8kQtu/Fd+9IFLuO/DX+e2c/8t+qSCkQyjYd69jGJMEbr0Gprg9gBdI78/k8Qy\n8YLwRmiaCb9KyRxWC5qm1E564QXu/NDVABj6gQFeVfkFLJ47zljDLmnygw9ezqmveRf3fugKbjjj\nc1z50H8woCXKWCpjJZb9bCnfXnU5bzrol/mro8+g6RtUxgxzd+eU6aNe1MA7nDR5+7zj8F4p73R4\n9TjvOedtZ6IWbnv/tTDfccTA4Vy34ov8wa3nc+ySX+Ou7Ru5d+W1ZEOGspb54aq1HL/k9Vz8wBoW\nN4f4t/dcSIkhVAxCicxnYdRTHZk26acIM16SY8TFiPOKpjBq+x1pzDLxvFHA4VFM8BFEaYhFgRLg\ncZg4i63qwpgkClLgNMdoSJSGwLjUmevnoOL5k9v+md855M28ZdkbQJq88eJTWb3iPE5Ydgynr/sE\ndfawo7GT777/aoYkZ4cWDFJiTJoMknHu2k9yT7GBhY0hzjr+FFb84gnUtMFiMweP4oAfPzHCccte\nyw7qzKfEEVe+h43vv45J6TkKhy9ZpBA0K8KKnLhHyzcyjJsqEme14zw6RIekMGeeBHM2kCZ4Ei8r\nCviYiredE6iVJVJjWDEBT0i4ZoOPUhBJnQhg63ExOIeJDuGh+6oxsISIjYKbt+slrkF3anj7F1Zx\ny0euQUT52k/+k9NfexJoECwj8OkNl/HJI8/psNvTjP6NHoNFcQqZQiEFVnPECKrBWUjEYHB4wtBD\nVEGYeGkfV1RSwMlZSBLLxMtKWyyntJ48MXVue0LIU8TpDmk5CbVm2TVEdW8FLA5CGN2W2quAWuLT\n8rMM6V6nncWw5QrVns3fOx7kRGb2iU/0HpwpCBEli/hfwcMztpNpOTftdZC0gHtWkWbDEy8rIQLR\n3qLT8uee2M9gEYQMaEafxnxyRPf2a4wJiYNJbusQJMpO2n/ahna87qtrHFyQOhsNHmMgx0TnnwzR\nsEbIdvxPNvlEO8xNQrk/0tNi6X2aUZxNPJtEmLbETPgdmk63pL2Ok/Hcl+az/+++Pn0q+woiZ2ml\n4uj8n45xTMmSBB7g9LRYGjNhnvd+2uLZGlqYOsTQeu+932tb4sBhJgJ6JPY/ekYspwphS9A66zu7\nX1PFs7VvKx1sOyXsPvZpZUBMJBKJ6dITYlmr1TDGtAVQVTHGtAXNe7/X+86yqjI2NsbGjRsB2pMG\no6OjbcHcVysztS4TicR06QmxhCBemzZtYmRkhM2bN7fFbGRkpC1+Tz/9NBs2bGBkZGRSV3tkZIS5\nc+dSqVTaLcqNGzeyaNEiRkZGGB0dBWjXd+bXTiQSienQEwtYK5UKmzZtAqCvr4/x8XGcczz44INt\n0Wy1GgEWLlzI+vXrsdZijMEY0xbBhx56iP7+fpYtW8b9999PqVRiyZIl7f/v7KInEonEdOmJlmW1\nWm0L2IIFC8iyjMcee6zdHR8ZGcFay+GHH46qsmPHDg499NC2UDrnMMagqhRFQbVaZevWrQwMDFAq\nldi4cWP7+FmWsXHjRh5//PFun3YikZhF9ETLsiViMDFxs2DBAqrVKosWLWJgYAAR4ZFHHmF4eJjx\n8XHq9TrLly9n69atZFmGc8FpuVwuMzg4yLZt21iwYAEiQrVapVQq8dRTT2GMYe7cuSxfvrybp5xI\nJGYZPdGyBDjssMNYunQp1lr6+vrYsmULIsLo6Ch79uwhz3OyLGPHjh1YaymXy2zfvr099ui9Z/Hi\nxSxfvpyxsTFe8YpX8LOf/YxSqcSuXbsYHBwkyzJUleHh4W6fbiKRmGX0hFiqKg888ADbt29nbGyM\n/v5+RIQ5c+a0BbJarXLQQQeRZRnVahXnHIsWLcJ7z8KFCymXyzz99NNs3bqVRYsWMTY2xvDwMI88\n8gilUolt27a1P6s1PppIJBLTpSfEsjXZUq1WaTabzJ8/v91CdM5Rq9X46U9/yqOPPsrSpUsZHx/n\nySefxFqLtZbR0VGazSZLlixhyZIlVCqV9hhmq37evHk0Go3kLpRIJF4QPRFI43Wve52uW7duL0fy\n1jikMYahoSGeeeaZtj9mi9YEj7VhCVx/fz9FUVCr1drbRYRGo8Fhhx3Gli1bOOKII2b4DBOJRK8y\nq6IOicguYDb2jRcB27ptxPMk2TxzzEa7Z6PN8OLsPkRVF/+8nXpiNhzYNB1l7zVE5K7ZZneyeeaY\njXbPRpthZuzuiTHLRCKR6HWSWCYSicQ06BWx/FK3DXiBzEa7k80zx2y0ezbaDDNgd09M8CQSiUSv\n0ysty0Qikehpui6WIvJbIrJJRDaLyPndtqeFiFwmIqMisr6jboGI3CQiD8bX+bFeROQf4zn8r4gc\n0yWbDxZzgNuIAAADqUlEQVSRW
0Vkg4j8RET+aJbYXRGRO0Tkvmj338T6w0Tk9mjfGhEpxfpyfL85\nbj+0G3ZHW6yI3CMiN8wimx8WkftF5F4RuSvW9fo1MiQi60Rko4iMiMibZtzmzsjiM/1HSKbyEPBK\nQsrp+4Aju2lTh21vBY4B1nfUfRY4P5bPBz4TyycB3yakcDkOuL1LNi8DjonlucADwJGzwG4BBmI5\nB26P9lwDnBbrvwh8JJY/Cnwxlk8D1nTxOjkPuAq4Ib6fDTY/DCyaUtfr18hXgHNiuQQMzbTNXfmx\nOr6ANwE3drxfDazupk1T7Dt0ilhuApbF8jKCfyjAxcD797Vfl+2/DnjXbLIb6AfuBn6N4GScTb1W\ngBuBN8VyFveTLtg6DNwMvAO4Id6cPW1z/Px9iWXPXiPAIPB/U7+vmba5293w5cCWjvePxbpeZamq\nPhHLTwJLY7nnziN2815PaKX1vN2xO3svMArcROhx7FTVYh+2te2O258BFs6sxQB8HvgEIW8v0YZe\ntxlC1uLviMiPReTcWNfL18hhwNPAl+OQx7+KyBxm2OZui+WsRcMjqyddCURkALgW+LiqjnVu61W7\nVdWp6tGE1tobgV/qsknPiYi8GxhV1R9325YXwFtU9RjgROBjIvLWzo09eI1khCGxL6jq64E9hG53\nm5mwudti+ThwcMf74VjXqzwlIssA4utorO+Z8xCRnCCUV6rq12N1z9vdQlV3ArcSurBDItJakttp\nW9vuuH0Q2D7Dpr4ZWCEiDwNfI3TF/4HethkAVX08vo4C/054OPXyNfIY8Jiq3h7fryOI54za3G2x\nvBN4VZxBLBEGvq/vsk3PxfXAWbF8FmFMsFV/ZpyFOw54pqN7MGOIiACXAiOq+rmOTb1u92IRGYrl\nPsI46whBNFfG3aba3TqflcAtsWUxY6jqalUdVtVDCdftLap6Bj1sM4CIzBGRua0y8JvAenr4GlHV\nJ4EtIvKaWPVOYMOM29yNAeYpg7QnEWZtHwI+2W17Ouy6GngCaBKebL9HGGO6GXgQ+C6wIO4rwL/E\nc7gfOLZLNr+F0BX5X+De+HfSLLD7KOCeaPd64K9j/SuBO4DNwFqgHOsr8f3muP2VXb5WfoOJ2fCe\ntjnad1/8+0nrnpsF18jRwF3xGvkGMH+mbU4reBKJRGIadLsbnkgkErOCJJaJRCIxDZJYJhKJxDRI\nYplIJBLTIIllIpFITIMklolEIjENklgmEonENEhimUgkEtPg/wFaPG/+ZxAINQAAAABJRU5ErkJg\ngg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FcLMnSKYPcjA", + "colab_type": "text" + }, + "source": [ + "## Euclidian Distance Transform\n", + "This operation updates the pixel value with the euclidian distance from the foreground pixel to the background one.\n", + "* Note : It takes only binary image and results in transformed image. If a different image is given it results in a image with single value" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "-OMh6oeRQaYQ", + "colab_type": "code", + "outputId": "bba9e240-3427-41b8-f1ec-38f5afdc0356", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 240 + } + }, + "source": [ + "gray = tf.image.convert_image_dtype(xray_img, tf.float32)\n", + "gray = tf.image.rgb_to_grayscale(gray)\n", + "gray = tf.image.convert_image_dtype(gray, tf.uint8)\n", + "gray = tf.expand_dims(gray, 0)\n", + "eucid = tfa.image.euclidean_dist_transform(gray)\n", + "eucid = tf.squeeze(eucid, (0, -1))\n", + "_ = plt.imshow(eucid, cmap='gray')" + ], + "execution_count": 12, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAADfCAYAAAD4Bhh5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztfX+ofOld3vPOvfPj3r2hadQua3bp\nKl0pUWpsQ6LEP1LFEoN0ESQkFI26uP6RoEKgblKothJIQZOmWIJfSdCAmqQYyRJC0xgVEcwPE1NN\nso2uuiG7bLIqa9x75/fM2z9mnjPP+cx7Zs78unNm5vPAMDNnzpw5c+be5/28z+f5fN4QY4TD4XA4\njhe1fZ+Aw+FwOHYLJ3qHw+E4cjjROxwOx5HDid7hcDiOHE70DofDceRwonc4HI4jx86IPoTwyhDC\nF0MIj4cQHtnV5zgcDodjMcIufPQhhDMAfwHg+wA8CeBTAF4bY/zC1j/M4XA4HAuxq4j+pQAejzH+\ndYyxD+C9AB7c0Wc5HA6HYwHOd3TcFwL4sjx/EsDLdIcQwsMAHp4+/Tc7Og+Hw+E4ZvxdjPEblu20\nK6JfihjjHQB3ACCE4H0YHA6HY3V8qcxOu5JungJwnzy/d7rN4XA4HLeMXRH9pwA8EEL4phBCA8Br\nADy6o89yOBwOxwLsRLqJMQ5DCG8A8BEAZwDeHWP8/C4+y+FwOByLsRN75con4Rq9w+FwrINPxxhf\nsmwnr4x1OByOI4cTvcPhcBw5nOgdDofjyOFE73A4HEcOJ3qHw+E4cuytMtbh2CUajcatfE6/37+V\nz3E4NoETveOgkCLwWq2G8/P8n3IIAfV6fafnMhgMlg4oqYHABwfHbcOJ3nEQIKEWkfr5+TlCCLlt\nuyb6EAIW1aGkXhsOh3ODgxO/Y9dwondUEpbYSea1Wg1nZ2eo1WbpJRK8JdbBYLCQiBXn5+cYjUal\n9y9zvFqthmazufCcYow54h8OhxiPxwB8AHBsD070jsqh0WgUEryS/Hg8RowRo9EoSdKrEH2MEcPh\ncGtEH2PMzTCA9CxjPB7nBirOEjzyd2wTTvSOSkDJvV6v54gdmJDgeDxGr9fLnisxaySsKEuOJNVt\nkemyXEK9Xs8IPoSQDWDNZhPj8Tg3cCnx6/k58TvKwonesXeQ5Ov1ekb0JL7BYJCRXowRg8Ege58l\n902Ib9ukmTqeJWrC5hh4DYjxeIxarZb7rjbid9J3LIITvWNvIFE1Gg2EENBoNLIonqQ+GAxykbsl\ntEMiuKJzrdVqc7q9Rvsc+EajUW57SmY6pOvhuD040TtuHUURPOWZ6+vrOWI/NgJb9H00Ure5ihAC\nQgi46667sn3G4zHq9XoW5XOmc2zXzLE+nOgdtwoleY3gR6NRFrkPBoOTdp7od6bco1G/RvPn5+dZ\nHoPyTggh5/E/xWvoyMOJ3nFraLVaOYIHgNFohE6ngxjj0Ubvm6BI62dS2sperVYri+aZ12CU3+12\nb/v0HRWBE73jVtBoNDKpptFoYDQaZfo7I3gn+HKw14l2Uzp6zs7O0Gg0MikMmMwCTnmWdOpwonfs\nFCrVXFxcAAC63S6Gw2EWlTrxrA9eO1pPGeFfXFzg7OwsG1yHw2F27xH+6cGJ3rFTaNIVQM5JUwWC\nPxYdW8+fCdzRaJSRPe2aquP7LOp04ETv2AkYyV9cXKBer2MwGKDdblcmktTzGw6HACZkWVSN2mg0\n5l6rortFI3wtQKvX62i1Wlmilt+ZxF+l7+DYPpzoHTsBI3kWPd1mBF/UUdK6WehR5zlqdW7qfVrN\nCsx72df9frtodcBBq9/v5wqxeONzYNaF08n+eOFE79gqNFKu1Wq4ubm51SRgo9FAs9mc62Zpm4dp\nlMvoloVKrMqlxKGDAj3/ALKq3V142LdR9arvY0Uxv8P5+TkuLy8xHo+znEmtVqvEbMuxfTjRO7YK\nRvLAhGhuQxawBUa0GmqHS20eBsxHt7QihhCyPjskb26nZ53b+Hm852cUVfEWoWi/bbU4YHTPAQ2Y\nDXzsq0O4jHOccKJ3bA1qoaRtchefAcwTH7czItfIG5j49ZX4WahFOYYEzcfcVwcADgzNZjMjR+7H\noq9FmveqxM1CKQ5em5K9bbXQaDRygx3gMs6xwonesRWwGIok3+v1tkIWVjdXmYQgCTebzUxiOT8/\nz+yco9EI4/E41xSMBMoCI41qGfkPh8NcFExSbLVaAPIzlvF4jMFggH6/n5GqEiavD4BSA6Beu6ur\nq+xa8LzWibwpy/B47XY7+z60X/L8nOiPC070j
+          "text/plain": [
+            "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + } + ] +} diff --git a/examples/layers_normalizations.ipynb b/examples/layers_normalizations.ipynb index 253b4494bb..7eb91dccb5 100644 --- a/examples/layers_normalizations.ipynb +++ b/examples/layers_normalizations.ipynb @@ -137,8 +137,8 @@ "outputId": "6e55e2de-663b-4ce4-fbe7-4e004594516e" }, "source": [ - "!pip install tensorflow==2.0.0-beta1 \n", - "!pip install tensorflow-addons\n", + "!pip install -q tensorflow==2.0.0rc0 \n", + "!pip install -q tensorflow-addons~=0.5\n", "from __future__ import absolute_import, division, print_function\n", "import tensorflow as tf\n", "import tensorflow_addons as tfa" diff --git a/examples/layers_weightnormalization.ipynb b/examples/layers_weightnormalization.ipynb index 904fbee4d0..ce572e883d 100644 --- a/examples/layers_weightnormalization.ipynb +++ b/examples/layers_weightnormalization.ipynb @@ -119,8 +119,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow-gpu==2.0.0-beta1\n", - "!pip install tensorflow-addons\n", + "!pip install tensorflow-gpu==2.0.0rc0\n", + "!pip install tensorflow-addons~=0.5\n", "from __future__ import absolute_import, division, print_function, unicode_literals\n", "\n", "import tensorflow as tf\n", diff --git a/examples/losses_triplet.ipynb b/examples/losses_triplet.ipynb index 79a02d2500..82f27ba18b 100644 --- a/examples/losses_triplet.ipynb +++ b/examples/losses_triplet.ipynb @@ -124,8 +124,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow-gpu==2.0.0-beta1\n", - "!pip install tfa-nightly\n", + "!pip install -q tensorflow-gpu==2.0.0rc0\n", + "!pip install -q tensorflow-addons~=0.5\n", "from __future__ import absolute_import, division, print_function, unicode_literals\n", "\n", "import io\n", @@ -378,4 +378,4 @@ "outputs": [] } ] -} \ No newline at end of file +} diff --git a/examples/optimizers_lazyadam.ipynb b/examples/optimizers_lazyadam.ipynb index 037e811b63..d77e652ab9 100644 --- a/examples/optimizers_lazyadam.ipynb +++ b/examples/optimizers_lazyadam.ipynb @@ -120,8 +120,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow-gpu==2.0.0-beta1\n", - "!pip install tensorflow-addons\n", + "!pip install tensorflow-gpu==2.0.0rc0\n", + "!pip install tensorflow-addons~=0.5\n", "from __future__ import absolute_import, division, print_function, unicode_literals\n", "\n", "import tensorflow as tf\n", diff --git a/examples/template.ipynb b/examples/template.ipynb index 4030320fed..8e22d9d2c8 100644 --- a/examples/template.ipynb +++ b/examples/template.ipynb @@ -129,8 +129,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow==2.0.0.a0\n", - "!pip install tensorflow-addons" + "!pip install tensorflow==2.0.0rc0\n", + "!pip install tensorflow-addons~=0.5" ], "execution_count": 0, "outputs": [] @@ -319,4 +319,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/makefile b/makefile index b4284e1025..b58f61c7d3 100644 --- a/makefile +++ b/makefile @@ -16,7 +16,6 @@ all: code-format sanity-check unit-test -# TODO: install those dependencies in docker image (dockerfile). 
install-ci-dependency: bash tools/ci_build/install/install_ci_dependency.sh --quiet diff --git a/setup.py b/setup.py index ff056c7b5a..5d6d3cba40 100644 --- a/setup.py +++ b/setup.py @@ -29,15 +29,28 @@ from __future__ import print_function import os +import platform import sys from datetime import datetime from setuptools import find_packages from setuptools import setup from setuptools.dist import Distribution +from setuptools import Extension DOCLINES = __doc__.split('\n') +TFA_NIGHTLY = 'tfa-nightly' +TFA_RELEASE = 'tensorflow-addons' + +if '--nightly' in sys.argv: + project_name = TFA_NIGHTLY + nightly_idx = sys.argv.index('--nightly') + sys.argv.pop(nightly_idx) +else: + project_name = TFA_RELEASE + +# Version version = {} base_dir = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(base_dir, "tensorflow_addons", "version.py")) as fp: @@ -45,17 +58,26 @@ exec(fp.read(), version) # yapf: enable +if project_name == TFA_NIGHTLY: + version['__version__'] += datetime.strftime(datetime.today(), "%Y%m%d") + +# Dependencies REQUIRED_PACKAGES = [ 'six >= 1.10.0', ] -if '--nightly' in sys.argv: - project_name = 'tfa-nightly' - nightly_idx = sys.argv.index('--nightly') - sys.argv.pop(nightly_idx) - version['__version__'] += datetime.strftime(datetime.today(), "%Y%m%d") -else: - project_name = 'tensorflow-addons' +if project_name == TFA_RELEASE: + # TODO: remove if-else condition when tf supports package consolidation. + if platform.system() == 'Linux': + REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0-rc0') + else: + REQUIRED_PACKAGES.append('tensorflow == 2.0.0-rc0') +elif project_name == TFA_NIGHTLY: + # TODO: remove if-else condition when tf-nightly supports package consolidation. + if platform.system() == 'Linux': + REQUIRED_PACKAGES.append('tf-nightly-gpu-2.0-preview') + else: + REQUIRED_PACKAGES.append('tf-nightly-2.0-preview') class BinaryDistribution(Distribution): @@ -73,6 +95,7 @@ def has_ext_modules(self): author='Google Inc.', author_email='opensource@google.com', packages=find_packages(), + ext_modules=[Extension('_foo', ['stub.cc'])], install_requires=REQUIRED_PACKAGES, include_package_data=True, zip_safe=False, @@ -84,9 +107,9 @@ def has_ext_modules(self): 'Intended Audience :: Science/Research', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Libraries', diff --git a/tensorflow_addons/__init__.py b/tensorflow_addons/__init__.py index 2a9f4df814..76b48a8940 100644 --- a/tensorflow_addons/__init__.py +++ b/tensorflow_addons/__init__.py @@ -17,59 +17,6 @@ from __future__ import division from __future__ import print_function -# We need to put some imports inside a function call below, and the function -# call needs to come before the *actual* imports that populate the -# tensorflow_probability namespace. Hence, we disable this lint check throughout -# the file. -# - - -# Ensure TensorFlow is importable and its version is sufficiently recent. This -# needs to happen before anything else, since the imports below will try to -# import tensorflow, too. -def _ensure_tf_install(): - """Attempt to import tensorflow, and ensure its version is sufficient. 
- - Raises: - ImportError: if either tensorflow is not importable or its version is - inadequate. - """ - try: - import tensorflow as tf - except ImportError: - # Print more informative error message, then reraise. - print("\n\nFailed to import TensorFlow. Please note that TensorFlow is" - " not installed by default when you install TensorFlow Addons." - " This is so that users can decide whether to install the" - " GPU-enabled TensorFlow package. To use TensorFlow Addons," - " please install the most recent version of TensorFlow, by" - " following instructions at https://tensorflow.org/install.\n\n") - raise - - import distutils.version - - # - # Update this whenever we need to depend on a newer TensorFlow release. - # - required_tensorflow_version = "2" - - if (distutils.version.LooseVersion(tf.__version__) < - distutils.version.LooseVersion(required_tensorflow_version)): - raise ImportError( - "This version of TensorFlow Addons requires TensorFlow " - "version >= {required}; Detected an installation of version " - "{present}. Please upgrade TensorFlow to proceed.".format( - required=required_tensorflow_version, present=tf.__version__)) - - -_ensure_tf_install() - -# Cleanup symbols to avoid polluting namespace. -del _ensure_tf_install -del absolute_import -del division -del print_function - # Local project imports from tensorflow_addons import activations from tensorflow_addons import callbacks @@ -83,3 +30,8 @@ def _ensure_tf_install(): from tensorflow_addons import text from tensorflow_addons.version import __version__ + +# Cleanup symbols to avoid polluting namespace. +del absolute_import +del division +del print_function diff --git a/tensorflow_addons/activations/BUILD b/tensorflow_addons/activations/BUILD index d454860322..34e87c6298 100644 --- a/tensorflow_addons/activations/BUILD +++ b/tensorflow_addons/activations/BUILD @@ -6,12 +6,14 @@ py_library( name = "activations", srcs = [ "__init__.py", + "gelu.py", "sparsemax.py", ], - srcs_version = "PY2AND3", - deps = [ + data = [ + "//tensorflow_addons/custom_ops/activations:_activation_ops.so", "//tensorflow_addons/utils", ], + srcs_version = "PY2AND3", ) py_test( @@ -26,3 +28,16 @@ py_test( ":activations", ], ) + +py_test( + name = "gelu_test", + size = "large", + srcs = [ + "gelu_test.py", + ], + main = "gelu_test.py", + srcs_version = "PY2AND3", + deps = [ + ":activations", + ], +) diff --git a/tensorflow_addons/activations/README.md b/tensorflow_addons/activations/README.md index 4ab59b23bb..500eee194b 100644 --- a/tensorflow_addons/activations/README.md +++ b/tensorflow_addons/activations/README.md @@ -1,14 +1,16 @@ # Addons - Activations ## Maintainers -| Submodule | Maintainers | Contact Info | -|:---------- |:------------- |:--------------| -| sparsemax | @AndreasMadsen | amwwebdk+github@gmail.com | +| Submodule | Maintainers | Contact Info | +|:----------|:--------------------------|:-----------------------------------------| +| gelu | @AakashKumarNain @WindQAQ | aakashnain@outlook.com windqaq@gmail.com | +| sparsemax | @AndreasMadsen | amwwebdk+github@gmail.com | ## Contents -| Submodule | Activation | Reference | -|:----------------------- |:-------------------|:---------------| -| sparsemax | Sparsemax | https://arxiv.org/abs/1602.02068 | +| Submodule | Activation | Reference | +|:----------|:-----------|:---------------------------------| +| gelu | gelu | https://arxiv.org/abs/1606.08415 | +| sparsemax | Sparsemax | https://arxiv.org/abs/1602.02068 | ## Contribution Guidelines diff --git 
a/tensorflow_addons/activations/__init__.py b/tensorflow_addons/activations/__init__.py index 5792d00356..45903a3975 100644 --- a/tensorflow_addons/activations/__init__.py +++ b/tensorflow_addons/activations/__init__.py @@ -18,4 +18,5 @@ from __future__ import division from __future__ import print_function +from tensorflow_addons.activations.gelu import gelu from tensorflow_addons.activations.sparsemax import sparsemax diff --git a/tensorflow_addons/activations/gelu.py b/tensorflow_addons/activations/gelu.py new file mode 100644 index 0000000000..539afbbe1c --- /dev/null +++ b/tensorflow_addons/activations/gelu.py @@ -0,0 +1,55 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf +from tensorflow_addons.utils import keras_utils +from tensorflow_addons.utils.resource_loader import get_path_to_datafile + +_activation_ops_so = tf.load_op_library( + get_path_to_datafile("custom_ops/activations/_activation_ops.so")) + + +@keras_utils.register_keras_custom_object +@tf.function +def gelu(x, approximate=True): + """Gaussian Error Linear Unit. + + Computes gaussian error linear: + `0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3)))` or + `x * P(X <= x) = 0.5 * x * (1 + erf(x / sqrt(2)))`, where P(X) ~ N(0, 1), + depending on whether approximation is enabled. + + See [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415) + and [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805). + + Args: + x: A `Tensor`. Must be one of the following types: + `float16`, `float32`, `float64`. + approximate: bool, whether to enable approximation. + Returns: + A `Tensor`. Has the same type as `x`. + """ + x = tf.convert_to_tensor(x) + return _activation_ops_so.gelu(x, approximate) + + +@tf.RegisterGradient("Gelu") +def _gelu_grad(op, grad): + return _activation_ops_so.gelu_grad(grad, op.inputs[0], + op.get_attr("approximate")) diff --git a/tensorflow_addons/activations/gelu_test.py b/tensorflow_addons/activations/gelu_test.py new file mode 100644 index 0000000000..f510715593 --- /dev/null +++ b/tensorflow_addons/activations/gelu_test.py @@ -0,0 +1,106 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from absl.testing import parameterized + +import math + +import numpy as np +import tensorflow as tf +from tensorflow_addons.activations import gelu +from tensorflow_addons.utils import test_utils + + +def _ref_gelu(x, approximate=True): + x = tf.convert_to_tensor(x) + if approximate: + pi = tf.cast(math.pi, x.dtype) + coeff = tf.cast(0.044715, x.dtype) + return 0.5 * x * ( + 1.0 + tf.tanh(tf.sqrt(2.0 / pi) * (x + coeff * tf.pow(x, 3)))) + else: + return 0.5 * x * ( + 1.0 + tf.math.erf(x / tf.cast(tf.sqrt(2.0), x.dtype))) + + +@test_utils.run_all_in_graph_and_eager_modes +class GeluTest(tf.test.TestCase, parameterized.TestCase): + @parameterized.named_parameters(("float16", np.float16), + ("float32", np.float32), + ("float64", np.float64)) + def test_gelu(self, dtype): + x = np.random.rand(2, 3, 4).astype(dtype) + self.assertAllCloseAccordingToType(gelu(x), _ref_gelu(x)) + self.assertAllCloseAccordingToType(gelu(x, False), _ref_gelu(x, False)) + + @parameterized.named_parameters(("float16", np.float16), + ("float32", np.float32), + ("float64", np.float64)) + def test_gradients(self, dtype): + x = tf.constant([1.0, 2.0, 3.0], dtype=dtype) + + for approximate in [True, False]: + with self.subTest(approximate=approximate): + with tf.GradientTape(persistent=True) as tape: + tape.watch(x) + y_ref = _ref_gelu(x, approximate) + y = gelu(x, approximate) + grad_ref = tape.gradient(y_ref, x) + grad = tape.gradient(y, x) + self.assertAllCloseAccordingToType(grad, grad_ref) + + @parameterized.named_parameters(("float32", np.float32), + ("float64", np.float64)) + def test_theoretical_gradients(self, dtype): + # Only test theoretical gradients for float32 and float64 + # because of the instability of float16 while computing jacobian + x = tf.constant([1.0, 2.0, 3.0], dtype=dtype) + + for approximate in [True, False]: + with self.subTest(approximate=approximate): + theoretical, numerical = tf.test.compute_gradient( + lambda x: gelu(x, approximate=approximate), [x]) + self.assertAllCloseAccordingToType( + theoretical, numerical, atol=1e-4) + + def test_unknown_shape(self): + fn = gelu.get_concrete_function( + tf.TensorSpec(shape=None, dtype=tf.float32)) + + for shape in [(1,), (1, 2), (1, 2, 3), (1, 2, 3, 4)]: + x = tf.ones(shape=shape, dtype=tf.float32) + self.assertAllClose(fn(x), gelu(x)) + + def test_serialization(self): + ref_fn = gelu + config = tf.keras.activations.serialize(ref_fn) + fn = tf.keras.activations.deserialize(config) + self.assertEqual(fn, ref_fn) + + def test_serialization_with_layers(self): + layer = tf.keras.layers.Dense(3, activation=gelu) + config = tf.keras.layers.serialize(layer) + deserialized_layer = tf.keras.layers.deserialize(config) + self.assertEqual(deserialized_layer.__class__.__name__, + layer.__class__.__name__) + self.assertEqual(deserialized_layer.activation.__name__, "gelu") + + +if __name__ == "__main__": + tf.test.main() diff --git a/tensorflow_addons/activations/sparsemax.py b/tensorflow_addons/activations/sparsemax.py index e269cf0b60..a72a5d5ba0 100644 --- a/tensorflow_addons/activations/sparsemax.py +++ b/tensorflow_addons/activations/sparsemax.py @@ -22,8 +22,8 @@ from tensorflow_addons.utils import keras_utils -@tf.function 
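As a side reference for the formulas in the `gelu` docstring added above, here is a small standalone NumPy sketch (illustrative only, not part of the op or this patch) comparing the tanh approximation with the exact erf form:

```python
# Illustrative NumPy check of the two gelu formulas documented above.
import math
import numpy as np

def gelu_approx(x):
    # 0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3)))
    return 0.5 * x * (1.0 + np.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * np.power(x, 3))))

def gelu_exact(x):
    # x * P(X <= x) = 0.5 * x * (1 + erf(x / sqrt(2))), with X ~ N(0, 1)
    erf = np.vectorize(math.erf)
    return 0.5 * x * (1.0 + erf(x / math.sqrt(2.0)))

x = np.linspace(-3.0, 3.0, 13)
# The approximation stays close to the exact form over this range.
assert np.allclose(gelu_approx(x), gelu_exact(x), atol=1e-2)
```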
@keras_utils.register_keras_custom_object +@tf.function def sparsemax(logits, axis=-1, name=None): """Sparsemax activation function [1]. diff --git a/tensorflow_addons/activations/sparsemax_test.py b/tensorflow_addons/activations/sparsemax_test.py index 3abe28fbb1..62e03f0184 100644 --- a/tensorflow_addons/activations/sparsemax_test.py +++ b/tensorflow_addons/activations/sparsemax_test.py @@ -274,6 +274,20 @@ def test_gradient_against_estimate(self, dtype=None): lambda logits: sparsemax(logits), [z], delta=1e-6) self.assertAllCloseAccordingToType(jacob_sym, jacob_num) + def test_serialization(self, dtype=None): + ref_fn = sparsemax + config = tf.keras.activations.serialize(ref_fn) + fn = tf.keras.activations.deserialize(config) + self.assertEqual(fn, ref_fn) + + def test_serialization_with_layers(self, dtype=None): + layer = tf.keras.layers.Dense(3, activation=sparsemax) + config = tf.keras.layers.serialize(layer) + deserialized_layer = tf.keras.layers.deserialize(config) + self.assertEqual(deserialized_layer.__class__.__name__, + layer.__class__.__name__) + self.assertEqual(deserialized_layer.activation.__name__, "sparsemax") + if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/custom_ops/activations/BUILD b/tensorflow_addons/custom_ops/activations/BUILD new file mode 100644 index 0000000000..a199fbc689 --- /dev/null +++ b/tensorflow_addons/custom_ops/activations/BUILD @@ -0,0 +1,47 @@ +licenses(["notice"]) # Apache 2.0 + +package(default_visibility = ["//visibility:public"]) + +load("@local_config_tf//:build_defs.bzl", "D_GLIBCXX_USE_CXX11_ABI") +load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda_is_configured", "if_cuda") + +cc_library( + name = "gelu_op_gpu", + srcs = [ + "cc/kernels/gelu_op.h", + "cc/kernels/gelu_op_gpu.cu.cc", + ], + copts = if_cuda_is_configured([ + "-DGOOGLE_CUDA=1", + "-x cuda", + "-nvcc_options=relaxed-constexpr", + "-nvcc_options=ftz=true", + ]), + deps = [ + "@local_config_tf//:libtensorflow_framework", + "@local_config_tf//:tf_header_lib", + ] + if_cuda_is_configured([ + "@local_config_cuda//cuda:cuda_libs", + "@local_config_cuda//cuda:cuda_headers", + ]), + alwayslink = 1, +) + +cc_binary( + name = "_activation_ops.so", + srcs = [ + "cc/kernels/gelu_op.cc", + "cc/kernels/gelu_op.h", + "cc/ops/gelu_op.cc", + ], + copts = [ + "-pthread", + "-std=c++11", + D_GLIBCXX_USE_CXX11_ABI, + ] + if_cuda(["-DGOOGLE_CUDA=1"]), + linkshared = 1, + deps = [ + "@local_config_tf//:libtensorflow_framework", + "@local_config_tf//:tf_header_lib", + ] + if_cuda_is_configured([":gelu_op_gpu"]), +) diff --git a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc new file mode 100644 index 0000000000..a48cd652ac --- /dev/null +++ b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc @@ -0,0 +1,77 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#define EIGEN_USE_THREADS + +#include "tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h" +#include "tensorflow/core/framework/op_kernel.h" +#include "tensorflow/core/framework/register_types.h" +#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" + +namespace tensorflow { + +using CPUDevice = Eigen::ThreadPoolDevice; + +#define REGISTER_GELU_KERNELS(type) \ + REGISTER_KERNEL_BUILDER( \ + Name("Gelu").Device(DEVICE_CPU).TypeConstraint("T"), \ + GeluOp); \ + REGISTER_KERNEL_BUILDER( \ + Name("GeluGrad").Device(DEVICE_CPU).TypeConstraint("T"), \ + GeluGradOp); + +// Gelu only makes sense with floating points. +TF_CALL_GPU_NUMBER_TYPES(REGISTER_GELU_KERNELS); +#undef REGISTER_GELU_KERNELS + +#if GOOGLE_CUDA + +using GPUDevice = Eigen::GpuDevice; + +// Forward declarations of the functor specializations for GPU. +namespace functor { +#define DECLARE_GPU_SPEC(T) \ + template <> \ + void Gelu::operator()( \ + const GPUDevice& d, typename TTypes::ConstTensor features, \ + bool approximate, typename TTypes::Tensor activations); \ + extern template struct Gelu; \ + \ + template <> \ + void GeluGrad::operator()( \ + const GPUDevice& d, typename TTypes::ConstTensor gradients, \ + typename TTypes::ConstTensor features, bool approximate, \ + typename TTypes::Tensor backprops); \ + extern template struct GeluGrad; + +TF_CALL_GPU_NUMBER_TYPES(DECLARE_GPU_SPEC); +#undef DECLARE_GPU_SPEC +} // namespace functor + +// Registration of the GPU implementations. +#define REGISTER_GELU_GPU_KERNELS(type) \ + REGISTER_KERNEL_BUILDER( \ + Name("Gelu").Device(DEVICE_GPU).TypeConstraint("T"), \ + GeluOp); \ + REGISTER_KERNEL_BUILDER( \ + Name("GeluGrad").Device(DEVICE_GPU).TypeConstraint("T"), \ + GeluGradOp); + +TF_CALL_GPU_NUMBER_TYPES(REGISTER_GELU_GPU_KERNELS); +#undef REGISTER_GELU_GPU_KERNELS + +#endif // GOOGLE_CUDA + +} // namespace tensorflow diff --git a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h new file mode 100644 index 0000000000..a0469f3571 --- /dev/null +++ b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h @@ -0,0 +1,144 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_ADDONS_GELU_OP_H_ +#define TENSORFLOW_ADDONS_GELU_OP_H_ + +#define EIGEN_USE_THREADS + +#include "tensorflow/core/framework/numeric_op.h" +#include "tensorflow/core/framework/op_kernel.h" +#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" + +namespace tensorflow { +namespace functor { + +// Functor used by GeluOp to do the computations. +template +struct Gelu { + // Computes Gelu activation. + // + // features: any shape. + // approximate: whether to enable approximation. + // activations: same shape as "features". 
+ void operator()(const Device& d, typename TTypes::ConstTensor features, + bool approximate, typename TTypes::Tensor activations) { + if (approximate) { + // y = 0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3))) + activations.device(d) = + static_cast(0.5) * features * + (static_cast(1) + + (static_cast(M_2_SQRTPI * M_SQRT1_2) * + (features + static_cast(0.044715) * features.cube())) + .tanh()); + } else { + // y = x * normcdf(x) = 0.5 * x * (1 + erf(x / sqrt(2))) + activations.device(d) = + static_cast(0.5) * features * + (static_cast(1) + (features * static_cast(M_SQRT1_2)).erf()); + } + } +}; + +// Functor used by GeluGradOp to do the computations. +template +struct GeluGrad { + // Computes GeluGrad backprops. + // + // gradients: gradients backpropagated to the Gelu op. + // features: the inputs that were passed to the Gelu op. + // approximate: whether to enable approximation. + // backprops: gradients to backpropagate to the Gelu inputs. + void operator()(const Device& d, typename TTypes::ConstTensor gradients, + typename TTypes::ConstTensor features, bool approximate, + typename TTypes::Tensor backprops) { + if (approximate) { + const T kAlpha = static_cast(M_2_SQRTPI * M_SQRT1_2); + const T kBeta = kAlpha * static_cast(0.044715) * static_cast(3); + const auto y = + (kAlpha * ((static_cast(0.044715) * features.cube()) + features)) + .tanh(); + backprops.device(d) = ((-features * y.square() + features) * + (kBeta * features.square() + kAlpha) + + static_cast(1) + y) * + gradients * static_cast(0.5); + } else { + backprops.device(d) = + gradients * (static_cast(M_2_SQRTPI * M_SQRT1_2 * 0.5) * features * + (-features.square() * static_cast(0.5)).exp() + + (static_cast(0.5) * + (static_cast(1) + + (features * static_cast(M_SQRT1_2)).erf()))); + } + } +}; + +} // namespace functor + +template +class GeluOp : public UnaryElementWiseOp> { + public: + explicit GeluOp(OpKernelConstruction* context) + : UnaryElementWiseOp>::UnaryElementWiseOp(context) { + OP_REQUIRES_OK(context, context->GetAttr("approximate", &approximate_)); + } + + void Operate(OpKernelContext* context, const Tensor& input, Tensor* output) { + functor::Gelu functor; + functor(context->eigen_device(), input.flat(), approximate_, + output->flat()); + } + + private: + bool approximate_; +}; + +template +class GeluGradOp : public BinaryElementWiseOp> { + public: + explicit GeluGradOp(OpKernelConstruction* context) + : BinaryElementWiseOp>::BinaryElementWiseOp( + context) { + OP_REQUIRES_OK(context, context->GetAttr("approximate", &approximate_)); + } + + void OperateNoTemplate(OpKernelContext* context, const Tensor& g, + const Tensor& a, bool approximate, Tensor* output); + + template + void Operate(OpKernelContext* context, const Tensor& g, const Tensor& a, + Tensor* output) { + OperateNoTemplate(context, g, a, approximate_, output); + } + + private: + bool approximate_; +}; + +template +void GeluGradOp::OperateNoTemplate(OpKernelContext* context, + const Tensor& g, const Tensor& a, + bool approximate, + Tensor* output) { + functor::GeluGrad functor; + functor(context->eigen_device(), g.flat(), a.flat(), + approximate, output->flat()); +} + +} // namespace tensorflow + +#undef EIGEN_USE_THREADS + +#endif // TENSORFLOW_ADDONS_GELU_OP_H_ diff --git a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc new file mode 100644 index 0000000000..37d21e66e0 --- /dev/null +++ 
b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc @@ -0,0 +1,36 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#if GOOGLE_CUDA + +#define EIGEN_USE_GPU + +#include "tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h" +#include "tensorflow/core/framework/register_types.h" +#include "third_party/eigen3/Eigen/Core" + +namespace tensorflow { + +using GPUDevice = Eigen::GpuDevice; + +#define DEFINE_GPU_KERNELS(T) \ + template struct functor::Gelu; \ + template struct functor::GeluGrad; + +TF_CALL_GPU_NUMBER_TYPES(DEFINE_GPU_KERNELS); + +} // namespace tensorflow + +#endif // GOOGLE_CUDA diff --git a/tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc b/tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc new file mode 100644 index 0000000000..03406894b8 --- /dev/null +++ b/tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc @@ -0,0 +1,37 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/core/framework/common_shape_fns.h" +#include "tensorflow/core/framework/op.h" +#include "tensorflow/core/framework/shape_inference.h" + +namespace tensorflow { + +REGISTER_OP("Gelu") + .Input("features: T") + .Output("activations: T") + .Attr("T: {half, float, double}") + .Attr("approximate: bool = true") + .SetShapeFn(shape_inference::UnchangedShape); + +REGISTER_OP("GeluGrad") + .Input("gradients: T") + .Input("features: T") + .Output("backprops: T") + .Attr("T: {half, float, double}") + .Attr("approximate: bool = true") + .SetShapeFn(shape_inference::MergeBothInputsShapeFn); + +} // namespace tensorflow diff --git a/tensorflow_addons/custom_ops/image/BUILD b/tensorflow_addons/custom_ops/image/BUILD index 1f4236dc9f..a0fdbc4da5 100644 --- a/tensorflow_addons/custom_ops/image/BUILD +++ b/tensorflow_addons/custom_ops/image/BUILD @@ -68,9 +68,33 @@ cc_library( alwayslink = 1, ) +cc_library( + name = "euclidean_distance_transform_op_gpu", + srcs = [ + "cc/kernels/euclidean_distance_transform_op.h", + "cc/kernels/euclidean_distance_transform_op_gpu.cu.cc", + ], + copts = if_cuda_is_configured([ + "-DGOOGLE_CUDA=1", + "-x cuda", + "-nvcc_options=relaxed-constexpr", + "-nvcc_options=ftz=true", + ]), + deps = [ + "@local_config_tf//:libtensorflow_framework", + "@local_config_tf//:tf_header_lib", + ] + if_cuda_is_configured([ + "@local_config_cuda//cuda:cuda_libs", + "@local_config_cuda//cuda:cuda_headers", + ]), + alwayslink = 1, +) + cc_binary( name = "_image_ops.so", srcs = [ + "cc/kernels/connected_components.cc", + "cc/kernels/connected_components.h", "cc/kernels/euclidean_distance_transform_op.cc", "cc/kernels/euclidean_distance_transform_op.h", "cc/kernels/image_projective_transform_op.cc", @@ -86,5 +110,8 @@ cc_binary( deps = [ "@local_config_tf//:libtensorflow_framework", "@local_config_tf//:tf_header_lib", - ] + if_cuda_is_configured([":image_projective_transform_op_gpu"]), + ] + if_cuda_is_configured([ + ":image_projective_transform_op_gpu", + ":euclidean_distance_transform_op_gpu", + ]), ) diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc b/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc new file mode 100644 index 0000000000..1dbe83fe2a --- /dev/null +++ b/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc @@ -0,0 +1,138 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +// See docs for ImageConnectedComponents in ../ops/image_ops.cc, and description +// of the algorithm in connected_components.h. 
+ +#define EIGEN_USE_THREADS + +#include "tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h" +#include "tensorflow/core/framework/op_kernel.h" +#include "tensorflow/core/framework/register_types.h" +#include "tensorflow/core/framework/types.h" +#include "tensorflow/core/platform/types.h" + +namespace tensorflow { + +using tensorflow::functor::BlockedImageUnionFindFunctor; +using tensorflow::functor::FindRootFunctor; +using tensorflow::functor::ImageConnectedComponentsFunctor; +using tensorflow::functor::TensorRangeFunctor; + +using OutputType = typename BlockedImageUnionFindFunctor::OutputType; + +// Computes connected components on batches of 2D images. +template +class ImageConnectedComponents : public OpKernel { + public: + explicit ImageConnectedComponents(OpKernelConstruction* ctx) + : OpKernel(ctx) {} + + void Compute(OpKernelContext* ctx) override { + const Tensor& images_t = ctx->input(0); + OP_REQUIRES(ctx, images_t.shape().dims() == 3, + errors::InvalidArgument("Input images must have rank 3")); + Tensor forest_t, rank_t; + OP_REQUIRES_OK(ctx, ctx->allocate_temp(tensorflow::DT_INT64, + images_t.shape(), &forest_t)); + OP_REQUIRES_OK(ctx, ctx->allocate_temp(tensorflow::DT_INT64, + images_t.shape(), &rank_t)); + Tensor* output_t; + OP_REQUIRES_OK(ctx, ctx->allocate_output(0, images_t.shape(), &output_t)); + + // Fill forest with values from 0 to n - 1, so that each node points to + // itself. + TensorRangeFunctor()(ctx->eigen_device(), + forest_t.flat()); + auto rank = rank_t.tensor(); + rank.device(ctx->eigen_device()) = rank.constant(OutputType(0)); + + const auto images = images_t.tensor(); + auto forest = forest_t.tensor(); + ImageConnectedComponentsFunctor()( + ctx, output_t->flat(), images, forest, rank); + } +}; + +using CPUDevice = Eigen::ThreadPoolDevice; + +namespace functor { + +// Connected components CPU implementation. See `connected_components.h` for a +// description of the algorithm. +template +struct ImageConnectedComponentsFunctor { + void operator()(OpKernelContext* ctx, + typename TTypes::Flat output, + typename TTypes::ConstTensor images, + typename TTypes::Tensor forest, + typename TTypes::Tensor rank) { + const int64 num_images = images.dimension(0), + num_rows = images.dimension(1), num_cols = images.dimension(2), + num_elements = images.size(); + // Bail out early for an empty image--no work to do. + if (num_elements == 0) { + return; + } + auto worker_threads = ctx->device()->tensorflow_cpu_worker_threads(); + BlockedImageUnionFindFunctor union_find( + images.data(), num_rows, num_cols, forest.data(), rank.data()); + while (union_find.can_merge()) { + union_find.merge_blocks(); + int64 num_blocks_vertically = union_find.num_blocks_vertically(); + int64 num_blocks_horizontally = union_find.num_blocks_horizontally(); + // Merging each block calls union_down for each pixel in a row of the + // block, and union_right for each pixel in a column of the block. Assume + // 20 instructions for each call to union_down or union_right. find() may + // loop more while searching for the root, but this should not be very + // significant. 
+ int cost = (union_find.block_height() + union_find.block_width()) * 20; + Shard(worker_threads->num_threads, worker_threads->workers, + num_images * num_blocks_vertically * num_blocks_horizontally, cost, + [&union_find, num_blocks_vertically, num_blocks_horizontally]( + int64 start_block, int64 limit_block) { + for (int64 i = start_block; i < limit_block; i++) { + int64 block_x = i % num_blocks_horizontally; + int64 block_y = + (i / num_blocks_horizontally) % num_blocks_vertically; + int64 image = + i / (num_blocks_horizontally * num_blocks_vertically); + union_find.merge_internal_block_edges(image, block_y, block_x); + } + }); + } + FindRootFunctor()(ctx->eigen_device(), output, + images.data(), union_find); + } +}; + +} // end namespace functor + +#define REGISTER_IMAGE_CONNECTED_COMPONENTS(TYPE) \ + REGISTER_KERNEL_BUILDER(Name("ImageConnectedComponents") \ + .Device(DEVICE_CPU) \ + .TypeConstraint("dtype"), \ + ImageConnectedComponents) +// Connected components (arguably) make sense for number, bool, and string types +TF_CALL_NUMBER_TYPES(REGISTER_IMAGE_CONNECTED_COMPONENTS); +TF_CALL_bool(REGISTER_IMAGE_CONNECTED_COMPONENTS); +TF_CALL_string(REGISTER_IMAGE_CONNECTED_COMPONENTS); +#undef REGISTER_IMAGE_CONNECTED_COMPONENTS + +// TODO(ringwalt): Implement on GPU. We probably want to stick to the original +// algorithm by Stava and Benes there for efficiency (computing small blocks in +// shared memory in CUDA thread blocks, instead of starting with single-pixel +// blocks). + +} // end namespace tensorflow \ No newline at end of file diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h b/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h new file mode 100644 index 0000000000..7d645641bc --- /dev/null +++ b/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h @@ -0,0 +1,305 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +// See docs for ImageConnectedComponents in ../ops/image_ops.cc, and description +// of the algorithm in connected_components.h. + +#ifndef TENSORFLOW_ADDONS_CONNECTED_COMPONENTS_H_ +#define TENSORFLOW_ADDONS_CONNECTED_COMPONENTS_H_ + +// Connected component analysis. The op is described in ../ops/image_ops.cc. A +// description of the algorithm appears below. + +#define EIGEN_USE_THREADS + +#include "tensorflow/core/framework/op_kernel.h" +#include "tensorflow/core/framework/tensor_types.h" +#include "tensorflow/core/platform/types.h" +#include "tensorflow/core/util/work_sharder.h" +#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" + +namespace tensorflow { + +namespace functor { + +template +bool is_nonzero(T value) { + return value != T(0); +} + +template <> +bool is_nonzero(string value) { + return value.size() != 0; +} + +// Processes each pixel of an image for union-find, in parallel blocks. 
This is +// loosely based on the algorithm in "GPU Computing Gems" by Ondrej Stava and +// Bedrich Benes, available here: +// http://hpcg.purdue.edu/bbenes/papers/Stava2011CCL.pdf +// The bulk of the process uses blocks of each image, which have each been +// processed separately. As long as there are multiple blocks in the image, we +// double the height and width of the blocks, creating new blocks which each +// consist of 2x2 previous sub-blocks. On each new block, we process adjacent +// pixels from the previous sub-blocks serially. However, the new blocks are not +// connected, so we can process each block in parallel. +// The GPU algorithm first processes blocks of a fixed size in GPU shared +// memory, with one image block per CUDA thread block. On the CPU, we just start +// with a block size of a single pixel, and borrow the rest of the algorithm +// unchanged. +template +class BlockedImageUnionFindFunctor { + public: + using OutputType = int64; + + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE BlockedImageUnionFindFunctor( + const T* images, const int64 num_rows, const int64 num_cols, + OutputType* forest, OutputType* rank) + : images_(images), + num_rows_(num_rows), + num_cols_(num_cols), + block_height_(1), + block_width_(1), + forest_(forest), + rank_(rank) {} + + // Returns the root of the tree that the pixel at the given index belongs to. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE OutputType + find(OutputType index) const { + while (forest_[index] != index) { + index = forest_[index]; + } + return index; + } + + // Returns the number of blocks along the y axis. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 num_blocks_vertically() const { + return (num_rows_ + block_height_ - 1) / block_height_; + } + + // Returns the number of blocks along the x axis. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 num_blocks_horizontally() const { + return (num_cols_ + block_width_ - 1) / block_width_; + } + + // Returns the total number of blocks in each image. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 num_blocks() const { + return num_blocks_vertically() * num_blocks_horizontally(); + } + + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 block_height() const { + return block_height_; + } + + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 block_width() const { + return block_width_; + } + + // Returns whether we may merge again (the image contains more than one + // block). + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE bool can_merge() const { + return block_height_ < num_rows_ || block_width_ < num_cols_; + } + + // Doubles the block size. After this method, you must call + // `merge_internal_block_edges` for each image and each *new* block's xy + // coordinates (typically in parallel). + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void merge_blocks() { + block_height_ *= 2; + block_width_ *= 2; + } + + // Processes pairs of pixels within the block which were adjacent in the four + // sub-blocks. This must be done at each stage so that the connected + // components in each block are joined correctly. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void merge_internal_block_edges( + int64 image_index, int64 block_vertical_index, + int64 block_horizontal_index) const { + int64 block_start_y = block_vertical_index * block_height_; + int64 block_start_x = block_horizontal_index * block_width_; + // Merge the 4 sub-blocks horizontally (fixing the vertical seam). 
+ int64 block_center_x = block_start_x + block_width_ / 2 - 1; + if (0 <= block_center_x && block_center_x + 1 < num_cols_) { + int64 merge_blocks_limit_y = + std::min(num_rows_, block_start_y + block_height_); + for (int64 y = block_start_y; y < merge_blocks_limit_y; y++) { + union_right(image_index, y, block_center_x); + } + } + // Merge the 4 sub-blocks vertically (fixing the horizontal seam). + int64 block_center_y = block_start_y + block_height_ / 2 - 1; + if (0 <= block_center_y && block_center_y + 1 < num_rows_) { + int64 merge_blocks_limit_x = + std::min(num_cols_, block_start_x + block_width_); + for (int64 x = block_start_x; x < merge_blocks_limit_x; x++) { + union_down(image_index, block_center_y, x); + } + } + } + + private: + // The input image(s). + const T* const images_; + const int64 num_rows_; + const int64 num_cols_; + // Current height of each sub-block of the image. + int64 block_height_; + // Current width of each sub-block of the image. + int64 block_width_; + // Union-find forest. This has the same size as `images_`, and each entry + // holds the index of its parent in `images_` (roots hold their own index). + // Cycles should not occur. + OutputType* const forest_; + // Union-find rank of each pixel. + OutputType* const rank_; + + // Unions the pixel with the pixel below it if applicable (both pixels are + // true, and the pixel is not in the last row). + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void union_down(OutputType batch, + OutputType row, + OutputType col) const { + T pixel = read_pixel(batch, row, col); + if (is_nonzero(pixel)) { + const int64 index_a = col + num_cols_ * (row + num_rows_ * batch); + if (row + 1 < num_rows_ && read_pixel(batch, row + 1, col) == pixel) { + const int64 index_b = col + num_cols_ * (row + 1 + num_rows_ * batch); + do_union(index_a, index_b); + } + } + } + + // Unions the pixel with the pixel to the right of it if applicable. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void union_right(OutputType batch, + OutputType row, + OutputType col) const { + T pixel = read_pixel(batch, row, col); + if (is_nonzero(pixel)) { + const int64 index_a = col + num_cols_ * (row + num_rows_ * batch); + if (col + 1 < num_cols_ && read_pixel(batch, row, col + 1) == pixel) { + const int64 index_b = col + 1 + num_cols_ * (row + num_rows_ * batch); + do_union(index_a, index_b); + } + } + } + + // Reads a pixel value in the images. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE T + read_pixel(const OutputType batch, const OutputType row, + const OutputType col) const { + return images_[col + num_cols_ * (row + num_rows_ * batch)]; + } + + // Unions the trees that the two pixels belong to, using their index in the + // `images_` array. + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void do_union( + OutputType index_a, OutputType index_b) const { + // Find the roots of index_a and index_b in the forest, and make one the + // child of the other. + index_a = find(index_a); + index_b = find(index_b); + const OutputType rank_a = rank_[index_a]; + const OutputType rank_b = rank_[index_b]; + OutputType parent, child; + if (index_a == index_b) { + return; + } else if (rank_a < rank_b) { + parent = index_a; + child = index_b; + } else { + parent = index_b; + child = index_a; + rank_[parent]++; + } + forest_[child] = parent; + } +}; + +// Runs the ImageUnionFindFunctor on all pixels. Will require different CPU and +// GPU implementations. 
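The `BlockedImageUnionFindFunctor` above is built around union-find with union by rank and no path compression, plus the blocked, parallel merging scheme described in its comments. As a rough illustration of just the core union-find idea (a textbook Python sketch, not the functor's exact logic), it could look like:

```python
# Textbook union-find with union by rank and no path compression; a simplified
# sketch of the idea the blocked functor above builds on.
def find(forest, index):
    # Follow parent pointers until reaching a root (forest[root] == root).
    while forest[index] != index:
        index = forest[index]
    return index

def union(forest, rank, a, b):
    root_a, root_b = find(forest, a), find(forest, b)
    if root_a == root_b:
        return
    if rank[root_a] < rank[root_b]:
        root_a, root_b = root_b, root_a   # keep root_a as the higher-rank root
    forest[root_b] = root_a               # attach the lower-rank tree under it
    if rank[root_a] == rank[root_b]:
        rank[root_a] += 1

forest, rank = list(range(5)), [0] * 5
union(forest, rank, 0, 1)
union(forest, rank, 3, 4)
assert find(forest, 1) == find(forest, 0)
assert find(forest, 2) not in (find(forest, 0), find(forest, 3))
```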
+template +class ImageConnectedComponentsFunctor { + public: + using OutputType = typename BlockedImageUnionFindFunctor::OutputType; + + void operator()(OpKernelContext* ctx, + typename TTypes::ConstTensor images, + typename TTypes::Tensor forest, + typename TTypes::Tensor rank); +}; + +// Fills a flat Tensor with indices from 0 to n - 1. +template +class TensorRangeFunctor { + public: + using OutputType = typename BlockedImageUnionFindFunctor::OutputType; + + void operator()(const Device& device, + typename TTypes::Flat tensor) { + tensor.device(device) = tensor.generate(TensorRangeGenerator()); + } + + private: + class TensorRangeGenerator { + public: + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE OutputType + operator()(const Eigen::array& coords) const { + return coords[0]; + } + }; +}; + +// Given the union-find forest, generates the root index for each node. This +// gives us arbitrary, usually non-consecutive ids for each connected component. +// The ids are massaged in Python to get deterministic, consecutive ids. +template +class FindRootFunctor { + public: + using OutputType = typename BlockedImageUnionFindFunctor::OutputType; + + void operator()(const Device& device, + typename TTypes::Flat component_ids, + const T* images, + const BlockedImageUnionFindFunctor& union_find) { + component_ids.device(device) = + component_ids.generate(FindRootGenerator(images, union_find)); + } + + private: + class FindRootGenerator { + const T* const images_; + const BlockedImageUnionFindFunctor union_find_; + + public: + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE FindRootGenerator( + const T* images, BlockedImageUnionFindFunctor union_find) + : images_(images), union_find_(union_find) {} + + EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE OutputType + operator()(const Eigen::array& coords) const { + if (is_nonzero(images_[coords[0]])) { + // True pixels have an arbitrary segment id > 0. The segment ids will be + // made contiguous later. + return union_find_.find(coords[0]) + 1; + } else { + // False pixels have a segment of 0. + return 0; + } + } + }; +}; + +} // end namespace functor + +} // namespace tensorflow + +#endif // TENSORFLOW_ADDONS_CONNECTED_COMPONENTS_H_ diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc index 8b5923edeb..5c6928b35f 100644 --- a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc +++ b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc @@ -77,8 +77,7 @@ TF_CALL_double(REGISTER); #undef REGISTER -// TODO: fix compile issue #349 of the gpu kernel. 
-#if 0 && GOOGLE_CUDA +#if GOOGLE_CUDA typedef Eigen::GpuDevice GPUDevice; @@ -87,7 +86,7 @@ namespace functor { #define DECLARE_FUNCTOR(TYPE) \ template <> \ void EuclideanDistanceTransformFunctor::operator()( \ - const GPUDevice &device, OutputType *output, const InputType *images) \ + const GPUDevice &device, OutputType *output, const InputType &images) \ const; \ extern template struct EuclideanDistanceTransformFunctor @@ -101,7 +100,6 @@ TF_CALL_double(DECLARE_FUNCTOR); REGISTER_KERNEL_BUILDER(Name("EuclideanDistanceTransform") \ .Device(DEVICE_GPU) \ .TypeConstraint("dtype"), \ - .HostMemory("output_shape"), \ EuclideanDistanceTransform) TF_CALL_half(REGISTER); diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h index 65940fd099..740c239a27 100644 --- a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h +++ b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h @@ -30,6 +30,8 @@ namespace generator { using Eigen::array; using Eigen::DenseIndex; +using Eigen::numext::sqrt; +using Eigen::numext::mini; template class EuclideanDistanceTransformGenerator { @@ -41,8 +43,8 @@ class EuclideanDistanceTransformGenerator { EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE EuclideanDistanceTransformGenerator( typename TTypes::ConstTensor input) : input_(input) { - height_ = input_.dimensions()[1]; - width_ = input_.dimensions()[2]; + height_ = input_.dimension(1); + width_ = input_.dimension(2); } EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE T @@ -52,17 +54,17 @@ class EuclideanDistanceTransformGenerator { if (input_(coords) == T(0)) return T(0); - float minDistance = static_cast(std::numeric_limits::max()); + T minDistance = Eigen::NumTraits::highest(); for (int h = 0; h < height_; ++h) { for (int w = 0; w < width_; ++w) { if (input_({coords[0], h, w, coords[3]}) == T(0)) { - float dist = std::sqrt((x - h) * (x - h) + (y - w) * (y - w)); - minDistance = std::min(minDistance, dist); + T dist = sqrt(T((x - h) * (x - h) + (y - w) * (y - w))); + minDistance = mini(minDistance, dist); } } } - return T(minDistance); + return minDistance; } }; diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc new file mode 100644 index 0000000000..47e0b45194 --- /dev/null +++ b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc @@ -0,0 +1,40 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#if GOOGLE_CUDA + +#define EIGEN_USE_GPU + +#include "tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h" +#include "tensorflow/core/framework/register_types.h" +#include "tensorflow/core/framework/types.h" +#include "tensorflow/core/platform/types.h" + +namespace tensorflow { + +namespace functor { + +// Explicit instantiation of the GPU functor. +typedef Eigen::GpuDevice GPUDevice; + +template struct EuclideanDistanceTransformFunctor; +template struct EuclideanDistanceTransformFunctor; +template struct EuclideanDistanceTransformFunctor; + +} // end namespace functor + +} // end namespace tensorflow + +#endif // GOOGLE_CUDA diff --git a/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc b/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc index 0477dfb29f..35aa4295f1 100644 --- a/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc +++ b/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc @@ -101,6 +101,23 @@ transformed_images: 4D `Tensor`, image(s) in NHWC format, generated by applying the `transforms` to the `images`. Satisfies the description above. )doc"; +static const char ImageConnectedComponentsDoc[] = R"doc( +Find the connected components of image(s). +For each image (along the 0th axis), all connected components of adjacent pixels +with the same non-zero value are detected and given unique ids. +The returned `components` tensor has 0s for the zero pixels of `images`, and +arbitrary nonzero ids for the connected components of nonzero values. Ids are +unique across all of the images, and are in row-major order by the first pixel +in the component. +Uses union-find with union by rank but not path compression, giving a runtime of +`O(n log n)`. See: + https://en.wikipedia.org/wiki/Disjoint-set_data_structure#Time_Complexity +image: Image(s) with shape (N, H, W). +components: Component ids for each pixel in "image". Same shape as "image". Zero + pixels all have an output of 0, and all components of adjacent pixels with + the same value are given consecutive ids, starting from 1. 
+)doc"; + } // namespace REGISTER_OP("EuclideanDistanceTransform") @@ -120,4 +137,16 @@ REGISTER_OP("ImageProjectiveTransformV2") .Output("transformed_images: dtype") .SetShapeFn(ResizeShapeFn) .Doc(kImageProjectiveTransformDoc); -} // namespace tensorflow + +REGISTER_OP("ImageConnectedComponents") + .Input("image: dtype") + .Output("components: int64") + .Attr( + "dtype: {int64, int32, uint16, int16, uint8, int8, half, float, " + "double, bool, string}") + .SetShapeFn([](InferenceContext *c) { + return shape_inference::UnchangedShape(c); + }) + .Doc(ImageConnectedComponentsDoc); + +} // namespace tensorflow \ No newline at end of file diff --git a/tensorflow_addons/custom_ops/layers/BUILD b/tensorflow_addons/custom_ops/layers/BUILD index ed0c567f59..bb9c00ddf5 100644 --- a/tensorflow_addons/custom_ops/layers/BUILD +++ b/tensorflow_addons/custom_ops/layers/BUILD @@ -10,14 +10,13 @@ cc_binary( srcs = [ "cc/kernels/correlation_cost_op.cc", "cc/kernels/correlation_cost_op.h", - "cc/kernels/correlation_cost_op_gpu.cu.cc", "cc/ops/correlation_cost_op.cc", ], copts = [ "-pthread", "-std=c++11", D_GLIBCXX_USE_CXX11_ABI, - ], + ] + if_cuda(["-DGOOGLE_CUDA=1"]), linkshared = 1, deps = [ "@local_config_tf//:libtensorflow_framework", diff --git a/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc b/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc index e1f4b1cdbc..1c3580e9e1 100644 --- a/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc +++ b/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc @@ -331,7 +331,7 @@ TF_CALL_float(REGISTER_CORRELATIONCOST_OP_CPU); #undef REGISTER_CORRELATIONCOST_OP_CPU // Register the GPU kernels. -#ifdef GOOGLE_CUDA +#if GOOGLE_CUDA #define REGISTER_CORRELATIONCOST_OP_GPU(T) \ REGISTER_KERNEL_BUILDER( \ diff --git a/tensorflow_addons/image/BUILD b/tensorflow_addons/image/BUILD index 79f5b1fcd5..27b42df54d 100644 --- a/tensorflow_addons/image/BUILD +++ b/tensorflow_addons/image/BUILD @@ -15,6 +15,7 @@ py_library( "utils.py", "sparse_image_warp.py", "interpolate_spline.py", + "connected_components.py", ]), data = [ ":sparse_image_warp_test_data", @@ -82,7 +83,6 @@ py_test( ], ) -# TODO: use cuda_test later. 
py_test( name = "transform_ops_test", size = "medium", @@ -147,3 +147,16 @@ py_test( ":image", ], ) + +py_test( + name = "connected_components_test", + size = "medium", + srcs = [ + "connected_components_test.py", + ], + main = "connected_components_test.py", + srcs_version = "PY2AND3", + deps = [ + ":image", + ], +) diff --git a/tensorflow_addons/image/README.md b/tensorflow_addons/image/README.md index 57f6fede11..6742c14792 100644 --- a/tensorflow_addons/image/README.md +++ b/tensorflow_addons/image/README.md @@ -3,6 +3,7 @@ ## Maintainers | Submodule | Maintainers | Contact Info | |:---------- |:----------- |:--------------| +| connected_components | @sayoojbk | sayoojbk@gmail.com | | dense_image_warp | @WindQAQ | windqaq@gmail.com | | distance_transform_ops | @mels630 | mels630@gmail.com | | distort_image_ops | @WindQAQ | windqaq@gmail.com | @@ -13,6 +14,7 @@ ## Components | Submodule | Image Processing Function | Reference | |:---------- |:----------- |:----------- | +| connected_components | connected_components | | | dense_image_warp | dense_image_warp | | | dense_image_warp | interpolate_bilinear | | | distance_transform_ops | euclidean_distance_transform | | @@ -28,6 +30,7 @@ | translate_ops | translate | | | translate_ops | translations_to_projective_transforms | | + ## Contribution Guidelines #### Standard API In order to conform with the current API standard, all image ops diff --git a/tensorflow_addons/image/__init__.py b/tensorflow_addons/image/__init__.py index d0d886735d..21a8b77c35 100644 --- a/tensorflow_addons/image/__init__.py +++ b/tensorflow_addons/image/__init__.py @@ -29,3 +29,4 @@ from tensorflow_addons.image.sparse_image_warp import sparse_image_warp from tensorflow_addons.image.interpolate_spline import interpolate_spline from tensorflow_addons.image.translate_ops import translate +from tensorflow_addons.image.connected_components import connected_components diff --git a/tensorflow_addons/image/connected_components.py b/tensorflow_addons/image/connected_components.py new file mode 100644 index 0000000000..921b8841fe --- /dev/null +++ b/tensorflow_addons/image/connected_components.py @@ -0,0 +1,96 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Connected Components.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf + +from tensorflow_addons.utils.resource_loader import get_path_to_datafile + +_image_ops_so = tf.load_op_library( + get_path_to_datafile("custom_ops/image/_image_ops.so")) + + +@tf.function +def connected_components(images, name=None): + """Labels the connected components in a batch of images. + + A component is a set of pixels in a single input image, which are + all adjacent and all have the same non-zero value. 
The components + using a squared connectivity of one (all True entries are joined with + their neighbors above,below, left, and right). Components across all + images have consecutive ids 1 through n. + Components are labeled according to the first pixel of the + component appearing in row-major order (lexicographic order by + image_index_in_batch, row, col). + Zero entries all have an output id of 0. + This op is equivalent with `scipy.ndimage.measurements.label` + on a 2D array with the default structuring element + (which is the connectivity used here). + Args: + images: A 2D (H, W) or 3D (N, H, W) Tensor of boolean image(s). + name: The name of the op. + Returns: + Components with the same shape as `images`. + False entries in `images` have value 0, and + all True entries map to a component id > 0. + Raises: + TypeError: if `images` is not 2D or 3D. + """ + with tf.name_scope(name or "connected_components"): + image_or_images = tf.convert_to_tensor(images, name="images") + if len(image_or_images.get_shape()) == 2: + images = image_or_images[None, :, :] + elif len(image_or_images.get_shape()) == 3: + images = image_or_images + else: + raise TypeError( + "images should have rank 2 (HW) or 3 (NHW). Static shape is %s" + % image_or_images.get_shape()) + components = _image_ops_so.image_connected_components(images) + + # TODO(ringwalt): Component id renaming should be done in the op, + # to avoid constructing multiple additional large tensors. + components_flat = tf.reshape(components, [-1]) + unique_ids, id_index = tf.unique(components_flat) + id_is_zero = tf.where(tf.equal(unique_ids, 0))[:, 0] + # Map each nonzero id to consecutive values. + nonzero_consecutive_ids = tf.range( + tf.shape(unique_ids)[0] - tf.shape(id_is_zero)[0]) + 1 + + def no_zero(): + # No need to insert a zero into the ids. + return nonzero_consecutive_ids + + def has_zero(): + # Insert a zero in the consecutive ids + # where zero appears in unique_ids. + # id_is_zero has length 1. + zero_id_ind = tf.cast(id_is_zero[0], tf.int32) + ids_before = nonzero_consecutive_ids[:zero_id_ind] + ids_after = nonzero_consecutive_ids[zero_id_ind:] + return tf.concat([ids_before, [0], ids_after], axis=0) + + new_ids = tf.cond( + tf.equal(tf.shape(id_is_zero)[0], 0), no_zero, has_zero) + components = tf.reshape( + tf.gather(new_ids, id_index), tf.shape(components)) + if len(image_or_images.get_shape()) == 2: + return components[0, :, :] + else: + return components diff --git a/tensorflow_addons/image/connected_components_test.py b/tensorflow_addons/image/connected_components_test.py new file mode 100644 index 0000000000..97d6b8b29b --- /dev/null +++ b/tensorflow_addons/image/connected_components_test.py @@ -0,0 +1,157 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for connected component analysis.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import logging +import tensorflow as tf +import numpy as np + +from tensorflow_addons.image.connected_components import connected_components +from tensorflow_addons.utils import test_utils + +# Image for testing connected_components, with a single, winding component. +SNAKE = np.asarray([[0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 1, 0], + [0, 1, 1, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0], + [0, 1, 0, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 1, 0], + [0, 1, 1, 1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, + 0]]) # pylint: disable + + +@test_utils.run_all_in_graph_and_eager_modes +class ConnectedComponentsTest(tf.test.TestCase): + def testDisconnected(self): + arr = tf.cast( + [[1, 0, 0, 1, 0, 0, 0, 0, 1], [0, 1, 0, 0, 0, 1, 0, 1, 0], + [1, 0, 1, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], + [0, 0, 1, 0, 0, 0, 0, 0, 0]], tf.bool) # pylint: disable + expected = ([[1, 0, 0, 2, 0, 0, 0, 0, 3], [0, 4, 0, 0, 0, 5, 0, 6, 0], + [7, 0, 8, 0, 0, 0, 9, 0, 0], [0, 0, 0, 0, 10, 0, 0, 0, 0], + [0, 0, 11, 0, 0, 0, 0, 0, 0]]) # pylint: disable + self.assertAllEqual(self.evaluate(connected_components(arr)), expected) + + def testSimple(self): + arr = [[0, 1, 0], [1, 1, 1], [0, 1, 0]] + + # Single component with id 1. + self.assertAllEqual( + self.evaluate(connected_components(tf.cast(arr, tf.bool))), arr) + + def testSnake(self): + + # Single component with id 1. + self.assertAllEqual( + self.evaluate(connected_components(tf.cast(SNAKE, tf.bool))), + SNAKE) + + def testSnake_disconnected(self): + for i in range(SNAKE.shape[0]): + for j in range(SNAKE.shape[1]): + + # If we disconnect any part of the snake except for the endpoints, + # there will be 2 components. + if SNAKE[i, j] and (i, j) not in [(1, 1), (6, 3)]: + disconnected_snake = SNAKE.copy() + disconnected_snake[i, j] = 0 + components = self.evaluate( + connected_components( + tf.cast(disconnected_snake, tf.bool))) + self.assertEqual(components.max(), 2, + 'disconnect (%d, %d)' % (i, j)) + bins = np.bincount(components.ravel()) + # Nonzero number of pixels labeled 0, 1, or 2. 
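Separately from the unit tests in this file, a minimal usage sketch of the new Python API (assuming eager execution and a `tensorflow_addons` build that includes this op) looks like the following; the expected labels follow the row-major, 4-connected semantics described in the `connected_components` docstring:

```python
# Illustrative usage of tensorflow_addons.image.connected_components.
import tensorflow as tf
from tensorflow_addons.image import connected_components

image = tf.cast([[1, 0, 0],
                 [1, 0, 1],
                 [0, 0, 1]], tf.bool)
labels = connected_components(image)
# Two 4-connected components of True pixels get ids 1 and 2 (row-major order
# of their first pixels); zero entries stay 0:
# [[1, 0, 0],
#  [1, 0, 2],
#  [0, 0, 2]]
print(labels.numpy())
```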
+ self.assertGreater(bins[0], 0) + self.assertGreater(bins[1], 0) + self.assertGreater(bins[2], 0) + + def testMultipleImages(self): + images = [[[1, 1, 1, 1], [1, 0, 0, 1], [1, 0, 0, 1], [1, 1, 1, 1]], + [[1, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 1]], + [[1, 1, 0, 1], [0, 1, 1, 0], [1, 0, 1, 0], + [0, 0, 1, 1]]] # pylint: disable + expected = [[[1, 1, 1, 1], [1, 0, 0, 1], [1, 0, 0, 1], [1, 1, 1, 1]], + [[2, 0, 0, 3], [0, 0, 0, 0], [0, 0, 0, 0], [4, 0, 0, 5]], + [[6, 6, 0, 7], [0, 6, 6, 0], [8, 0, 6, 0], + [0, 0, 6, 6]]] # pylint: disable + + self.assertAllEqual( + self.evaluate(connected_components(tf.cast(images, tf.bool))), + expected) + + def testZeros(self): + + self.assertAllEqual( + connected_components( + self.evaluate(tf.zeros((100, 20, 50), tf.bool))), + np.zeros((100, 20, 50))) + + def testOnes(self): + + self.assertAllEqual( + self.evaluate( + connected_components(tf.ones((100, 20, 50), tf.bool))), + np.tile(np.arange(100)[:, None, None] + 1, [1, 20, 50])) + + def testOnes_small(self): + + self.assertAllEqual( + self.evaluate(connected_components(tf.ones((3, 5), tf.bool))), + np.ones((3, 5))) + + def testRandom_scipy(self): + np.random.seed(42) + images = np.random.randint(0, 2, size=(10, 100, 200)).astype(np.bool) + expected = connected_components_reference_implementation(images) + if expected is None: + return + + self.assertAllEqual( + self.evaluate(connected_components(images)), expected) + + +def connected_components_reference_implementation(images): + try: + # pylint disable=g-import-not-at-top + from scipy.ndimage import measurements + except ImportError: + logging.exception( + 'Skipping test method because scipy could not be loaded') + return + image_or_images = np.asarray(images) + if len(image_or_images.shape) == 2: + images = image_or_images[None, :, :] + elif len(image_or_images.shape) == 3: + images = image_or_images + components = np.asarray([measurements.label(image)[0] for image in images]) + # Get the count of nonzero ids for each image, and offset each image's nonzero + # ids using the cumulative sum. + num_ids_per_image = components.reshape( + [-1, components.shape[1] * components.shape[2]]).max(axis=-1) + positive_id_start_per_image = np.cumsum(num_ids_per_image) + for i in range(components.shape[0]): + new_id_start = positive_id_start_per_image[i - 1] if i > 0 else 0 + components[i, components[i] > 0] += new_id_start + if len(image_or_images.shape) == 2: + return components[0, :, :] + else: + return components + + +if __name__ == '__main__': + tf.test.main() diff --git a/tensorflow_addons/image/interpolate_spline_test.py b/tensorflow_addons/image/interpolate_spline_test.py index 106edf8beb..33dcf0fc26 100644 --- a/tensorflow_addons/image/interpolate_spline_test.py +++ b/tensorflow_addons/image/interpolate_spline_test.py @@ -21,9 +21,8 @@ from scipy import interpolate as sc_interpolate import tensorflow as tf -import tensorflow.compat.v1 as tf1 # TODO: locate placeholder -from tensorflow_addons.utils import test_utils from tensorflow_addons.image import interpolate_spline +from tensorflow_addons.utils import test_utils class _InterpolationProblem(object): @@ -34,14 +33,14 @@ def get_problem(self, optimizable=False, extrapolate=True, """Make data for an interpolation problem where all x vectors are n-d. Args: - optimizable: If True, then make train_points a tf.Variable. - extrapolate: If False, then clamp the query_points values to be within - the max and min of train_points. - dtype: The data type to use. 
+ optimizable: If True, then make train_points a tf.Variable. + extrapolate: If False, then clamp the query_points values to be + within the max and min of train_points. + dtype: The data type to use. Returns: - query_points, query_values, train_points, train_values: training and - test tensors for interpolation problem + query_points, query_values, train_points, train_values: training + and test tensors for interpolation problem. """ # The values generated here depend on a seed of 0. @@ -57,7 +56,7 @@ def get_problem(self, optimizable=False, extrapolate=True, init_points = init_points.astype(dtype) train_points = (tf.Variable(init_points) if optimizable else tf.constant(init_points)) - train_values = self.tf_function(train_points) + train_values = self.test_function(train_points) query_points_np = np.random.uniform( size=[batch_size, num_query_points, self.DATA_DIM]) @@ -67,7 +66,7 @@ def get_problem(self, optimizable=False, extrapolate=True, np.max(init_points)) query_points = tf.constant(query_points_np) - query_values = self.np_function(query_points_np) + query_values = self.test_function(query_points_np) return query_points, query_values, train_points, train_values @@ -99,16 +98,8 @@ class _QuadraticPlusSinProblem1D(_InterpolationProblem): ] } - def np_function(self, x): - """Takes np array, evaluates the test function, and returns np - array.""" - return np.sum( - np.power((x - 0.5), 3) - 0.25 * x + 10 * np.sin(x * 10), - axis=2, - keepdims=True) - - def tf_function(self, x): - """Takes tf tensor, evaluates the test function, and returns tf + def test_function(self, x): + """Takes a tensor, evaluates the test function, and returns a tensor.""" return tf.reduce_mean( tf.pow((x - 0.5), 3) - 0.25 * x + 10 * tf.sin(x * 10), @@ -144,16 +135,8 @@ class _QuadraticPlusSinProblemND(_InterpolationProblem): ], } - def np_function(self, x): - """Takes np array, evaluates the test function, and returns np - array.""" - return np.sum( - np.square(x - 0.5) + 0.25 * x + 1 * np.sin(x * 15), - axis=2, - keepdims=True) - - def tf_function(self, x): - """Takes tf tensor, evaluates the test function, and returns tf + def test_function(self, x): + """Takes a tensor, evaluates the test function, and returns a tensor.""" return tf.reduce_sum( tf.square(x - 0.5) + 0.25 * x + 1 * tf.sin(x * 15), @@ -161,6 +144,7 @@ def tf_function(self, x): keepdims=True) +@test_utils.run_all_in_graph_and_eager_modes class InterpolateSplineTest(tf.test.TestCase): def test_1d_linear_interpolation(self): """For 1d linear interpolation, we can compare directly to scipy.""" @@ -171,41 +155,35 @@ def test_1d_linear_interpolation(self): interpolation_order = 1 with tf.name_scope('interpolator'): - interpolator = interpolate_spline( - train_points, train_values, query_points, interpolation_order) - with self.cached_session() as sess: - fetches = [ - query_points, train_points, train_values, interpolator - ] - query_points_, train_points_, train_values_, interp_ = sess.run( # pylint: disable=C0301 - fetches) - - # Just look at the first element of the minibatch. - # Also, trim the final singleton dimension. - interp_ = interp_[0, :, 0] - query_points_ = query_points_[0, :, 0] - train_points_ = train_points_[0, :, 0] - train_values_ = train_values_[0, :, 0] - - # Compute scipy interpolation. 
- scipy_interp_function = sc_interpolate.interp1d( - train_points_, train_values_, kind='linear') - - scipy_interpolation = scipy_interp_function(query_points_) - scipy_interpolation_on_train = scipy_interp_function( - train_points_) - - # Even with float64 precision, the interpolants disagree with scipy a - # bit due to the fact that we add the EPSILON to prevent sqrt(0), etc. - tol = 1e-3 - - self.assertAllClose( - train_values_, - scipy_interpolation_on_train, - atol=tol, - rtol=tol) - self.assertAllClose( - interp_, scipy_interpolation, atol=tol, rtol=tol) + interp = self.evaluate( + interpolate_spline(train_points, train_values, query_points, + interpolation_order)) + + query_points, train_points, train_values, = self.evaluate( + [query_points, train_points, train_values]) + + # Just look at the first element of the minibatch. + # Also, trim the final singleton dimension. + interp = interp[0, :, 0] + query_points = query_points[0, :, 0] + train_points = train_points[0, :, 0] + train_values = train_values[0, :, 0] + + # Compute scipy interpolation. + scipy_interp_function = sc_interpolate.interp1d( + train_points, train_values, kind='linear') + + scipy_interpolation = scipy_interp_function(query_points) + scipy_interpolation_on_train = scipy_interp_function(train_points) + + # Even with float64 precision, the interpolants disagree with scipy a + # bit due to the fact that we add the EPSILON to prevent sqrt(0), etc. + tol = 1e-3 + + self.assertAllClose( + train_values, scipy_interpolation_on_train, atol=tol, rtol=tol) + self.assertAllClose( + interp, scipy_interpolation, atol=tol, rtol=tol) def test_1d_interpolation(self): """Regression test for interpolation with 1-D points.""" @@ -216,17 +194,15 @@ def test_1d_interpolation(self): for order in (1, 2, 3): for reg_weight in (0, 0.01): - interpolator = interpolate_spline(train_points, train_values, - query_points, order, - reg_weight) + interp = self.evaluate( + interpolate_spline(train_points, train_values, + query_points, order, reg_weight)) target_interpolation = tp.HARDCODED_QUERY_VALUES[(order, reg_weight)] target_interpolation = np.array(target_interpolation) - with self.cached_session() as sess: - interp_val = sess.run(interpolator) - self.assertAllClose(interp_val[0, :, 0], - target_interpolation) + + self.assertAllClose(interp[0, :, 0], target_interpolation) def test_nd_linear_interpolation(self): """Regression test for interpolation with N-D points.""" @@ -237,19 +213,16 @@ def test_nd_linear_interpolation(self): for order in (1, 2, 3): for reg_weight in (0, 0.01): - interpolator = interpolate_spline(train_points, train_values, - query_points, order, - reg_weight) + interp = self.evaluate( + interpolate_spline(train_points, train_values, + query_points, order, reg_weight)) target_interpolation = tp.HARDCODED_QUERY_VALUES[(order, reg_weight)] target_interpolation = np.array(target_interpolation) - with self.cached_session() as sess: - interp_val = sess.run(interpolator) - self.assertAllClose(interp_val[0, :, 0], - target_interpolation) - @test_utils.run_deprecated_v1 + self.assertAllClose(interp[0, :, 0], target_interpolation) + def test_nd_linear_interpolation_unspecified_shape(self): """Ensure that interpolation supports dynamic batch_size and num_points.""" @@ -257,72 +230,64 @@ def test_nd_linear_interpolation_unspecified_shape(self): (query_points, _, train_points, train_values) = tp.get_problem(dtype='float64') - # Construct placeholders such that the batch size, number of train points, - # and number of query points are 
not known at graph construction time.
        feature_dim = query_points.shape[-1]
        value_dim = train_values.shape[-1]
-        train_points_ph = tf1.placeholder(
-            dtype=train_points.dtype, shape=[None, None, feature_dim])
-        train_values_ph = tf1.placeholder(
-            dtype=train_values.dtype, shape=[None, None, value_dim])
-        query_points_ph = tf1.placeholder(
-            dtype=query_points.dtype, shape=[None, None, feature_dim])

        order = 1
        reg_weight = 0.01

-        interpolator = interpolate_spline(train_points_ph, train_values_ph,
-                                          query_points_ph, order, reg_weight)
+        # Get concrete functions such that the batch size, number of train points,
+        # and number of query points are not known at graph construction time.
+        fn = tf.function(interpolate_spline).get_concrete_function(
+            tf.TensorSpec(
+                shape=[None, None, feature_dim], dtype=train_points.dtype),
+            tf.TensorSpec(
+                shape=[None, None, value_dim], dtype=train_values.dtype),
+            tf.TensorSpec(
+                shape=[None, None, feature_dim], dtype=query_points.dtype),
+            order, reg_weight)

        target_interpolation = tp.HARDCODED_QUERY_VALUES[(order, reg_weight)]
        target_interpolation = np.array(target_interpolation)
-        with self.cached_session() as sess:
-            (train_points_value, train_values_value,
-             query_points_value) = sess.run(
-                 [train_points, train_values, query_points])
+        interp_val = self.evaluate(
+            fn(train_points, train_values, query_points))

-            interp_val = sess.run(
-                interpolator,
-                feed_dict={
-                    train_points_ph: train_points_value,
-                    train_values_ph: train_values_value,
-                    query_points_ph: query_points_value
-                })
-            self.assertAllClose(interp_val[0, :, 0], target_interpolation)
+        self.assertAllClose(interp_val[0, :, 0], target_interpolation)

    def test_fully_unspecified_shape(self):
        """Ensure that error is thrown when input/output dim unspecified."""
-        self.skipTest("TODO: port to tf2.0 / eager")
        tp = _QuadraticPlusSinProblemND()
        (query_points, _, train_points,
         train_values) = tp.get_problem(dtype='float64')

-        # Construct placeholders such that the batch size, number of train points,
-        # and number of query points are not known at graph construction time.
        feature_dim = query_points.shape[-1]
        value_dim = train_values.shape[-1]
-        train_points_ph = tf1.placeholder(
-            dtype=train_points.dtype, shape=[None, None, feature_dim])
-        train_points_ph_invalid = tf1.placeholder(
-            dtype=train_points.dtype, shape=[None, None, None])
-        train_values_ph = tf1.placeholder(
-            dtype=train_values.dtype, shape=[None, None, value_dim])
-        train_values_ph_invalid = tf1.placeholder(
-            dtype=train_values.dtype, shape=[None, None, None])
-        query_points_ph = tf1.placeholder(
-            dtype=query_points.dtype, shape=[None, None, feature_dim])

        order = 1
        reg_weight = 0.01

+        # Get concrete functions such that the batch size, number of train points,
+        # and number of query points are not known at graph construction time.
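The hunks above and below replace `tf1.placeholder` with `tf.TensorSpec` arguments to `get_concrete_function`, so the batch size and point counts stay unknown until call time. A minimal, self-contained sketch of that migration pattern; the `scale` function here is only a stand-in and is not part of the patch:

```python
import tensorflow as tf

# A toy function standing in for interpolate_spline; only the shape handling
# matters. None dimensions in TensorSpec play the role of the old
# tf1.placeholder dynamic batch size and point counts.
def scale(points):
    return 2.0 * points

concrete_fn = tf.function(scale).get_concrete_function(
    tf.TensorSpec(shape=[None, None, 3], dtype=tf.float64))

# The same concrete function accepts any batch size and number of points.
print(concrete_fn(tf.zeros([2, 7, 3], dtype=tf.float64)).shape)  # (2, 7, 3)
print(concrete_fn(tf.zeros([5, 1, 3], dtype=tf.float64)).shape)  # (5, 1, 3)
```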
with self.assertRaises(ValueError): - _ = interpolate_spline(train_points_ph_invalid, train_values_ph, - query_points_ph, order, reg_weight) + fn = tf.function(interpolate_spline).get_concrete_function( + tf.TensorSpec( + shape=[None, None, None], dtype=train_points.dtype), + tf.TensorSpec( + shape=[None, None, value_dim], dtype=train_values.dtype), + tf.TensorSpec( + shape=[None, None, feature_dim], dtype=query_points.dtype), + order, reg_weight) with self.assertRaises(ValueError): - _ = interpolate_spline(train_points_ph, train_values_ph_invalid, - query_points_ph, order, reg_weight) + fn = tf.function(interpolate_spline).get_concrete_function( + tf.TensorSpec( + shape=[None, None, feature_dim], dtype=train_points.dtype), + tf.TensorSpec( + shape=[None, None, None], dtype=train_values.dtype), + tf.TensorSpec( + shape=[None, None, feature_dim], dtype=query_points.dtype), + order, reg_weight) def test_interpolation_gradient(self): """Make sure that backprop can run. Correctness of gradients is @@ -342,22 +307,21 @@ def test_interpolation_gradient(self): regularization = 0.001 for interpolation_order in (1, 2, 3, 4): - optimizer = tf1.train.MomentumOptimizer(0.001, 0.9) - - @tf.function - def train_step(): - with tf.GradientTape() as gt: - interpolator = interpolate_spline( - train_points, train_values, query_points, - interpolation_order, regularization) - loss = tf.reduce_mean( - tf.square(query_values - interpolator)) - grad = gt.gradient(loss, [train_points]) - grad, _ = tf.clip_by_global_norm(grad, 1.0) - opt_func = optimizer.apply_gradients(zip(grad, [train_points])) - - for epoch in range(100): - train_step() + + def loss_fn(): + interpolator = interpolate_spline( + train_points, train_values, query_points, + interpolation_order, regularization) + loss = tf.reduce_mean(tf.square(query_values - interpolator)) + return loss + + optimizer = tf.keras.optimizers.SGD( + learning_rate=0.001, momentum=0.9, clipnorm=1.0) + opt_op = optimizer.minimize(loss_fn, [train_points]) + + self.evaluate(tf.compat.v1.global_variables_initializer()) + for _ in range(100): + self.evaluate(opt_op) if __name__ == '__main__': diff --git a/tensorflow_addons/image/sparse_image_warp_test.py b/tensorflow_addons/image/sparse_image_warp_test.py index 4c2659c7d5..7fdf2aad6e 100644 --- a/tensorflow_addons/image/sparse_image_warp_test.py +++ b/tensorflow_addons/image/sparse_image_warp_test.py @@ -19,13 +19,14 @@ import numpy as np import tensorflow as tf -import tensorflow.compat.v1 as tf1 # TODO: port TF1 test files? 
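The `test_interpolation_gradient` rewrite above drops the tf1 `MomentumOptimizer` plus explicit `clip_by_global_norm`/`apply_gradients` loop in favor of `tf.keras.optimizers.SGD` with `clipnorm` and a loss callable. A standalone sketch of that `minimize` pattern, using a toy quadratic loss rather than the spline interpolant:

```python
import tensorflow as tf

# Toy variable and loss; the real test instead minimizes the interpolation
# error with respect to train_points.
var = tf.Variable([3.0, -2.0])

def loss_fn():
    return tf.reduce_mean(tf.square(var))

# Momentum and gradient clipping are configured on the optimizer itself,
# replacing the explicit tf.clip_by_global_norm / apply_gradients calls.
optimizer = tf.keras.optimizers.SGD(
    learning_rate=0.1, momentum=0.9, clipnorm=1.0)
for _ in range(100):
    optimizer.minimize(loss_fn, [var])
print(var.numpy())  # should approach [0., 0.]
```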
+from tensorflow_addons.image import sparse_image_warp from tensorflow_addons.image.sparse_image_warp import _get_boundary_locations from tensorflow_addons.image.sparse_image_warp import _get_grid_locations -from tensorflow_addons.image import sparse_image_warp +from tensorflow_addons.utils import test_utils from tensorflow_addons.utils.resource_loader import get_path_to_datafile +@test_utils.run_all_in_graph_and_eager_modes class SparseImageWarpTest(tf.test.TestCase): def setUp(self): np.random.seed(0) @@ -82,7 +83,7 @@ def assertZeroShift(self, order, regularization, num_boundary_points): image = np.random.uniform( size=[batch_size, image_height, image_width, channels]) - input_image_op = tf.constant(np.float32(image)) + input_image = tf.constant(np.float32(image)) control_point_locations = [[1., 1.], [2., 2.], [2., 1.]] control_point_locations = tf.constant( @@ -93,19 +94,16 @@ def assertZeroShift(self, order, regularization, num_boundary_points): control_point_displacements = tf.constant( np.float32(control_point_displacements)) - (warped_image_op, flow_field) = sparse_image_warp( - input_image_op, + (warped_image, flow) = sparse_image_warp( + input_image, control_point_locations, control_point_locations + control_point_displacements, interpolation_order=order, regularization_weight=regularization, num_boundary_points=num_boundary_points) - with self.cached_session() as sess: - warped_image, input_image, _ = sess.run( - [warped_image_op, input_image_op, flow_field]) - - self.assertAllClose(warped_image, input_image) + warped_image, input_image = self.evaluate([warped_image, input_image]) + self.assertAllClose(warped_image, input_image) def testMoveSinglePixel(self): """Run assertMoveSinglePixel for various hyperparameters and data @@ -125,7 +123,7 @@ def assertMoveSinglePixel(self, order, num_boundary_points, type_to_use): image = np.zeros([batch_size, image_height, image_width, channels]) image[:, 3, 3, :] = 1.0 - input_image_op = tf.constant(image, dtype=type_to_use) + input_image = tf.constant(image, dtype=type_to_use) # Place a control point at the one white pixel. control_point_locations = [[3., 3.]] @@ -138,116 +136,110 @@ def assertMoveSinglePixel(self, order, num_boundary_points, type_to_use): np.float32(np.expand_dims(control_point_displacements, 0)), dtype=type_to_use) - (warped_image_op, flow_field) = sparse_image_warp( - input_image_op, + (warped_image, flow) = sparse_image_warp( + input_image, control_point_locations, control_point_locations + control_point_displacements, interpolation_order=order, num_boundary_points=num_boundary_points) - with self.cached_session() as sess: - warped_image, input_image, flow = sess.run( - [warped_image_op, input_image_op, flow_field]) - # Check that it moved the pixel correctly. - self.assertAllClose( - warped_image[0, 4, 5, :], - input_image[0, 4, 4, :], - atol=1e-5, - rtol=1e-5) - - # Test that there is no flow at the corners. - for i in (0, image_height - 1): - for j in (0, image_width - 1): - self.assertAllClose( - flow[0, i, j, :], np.zeros([2]), atol=1e-5, rtol=1e-5) + warped_image, input_image, flow = self.evaluate( + [warped_image, input_image, flow]) + # Check that it moved the pixel correctly. + self.assertAllClose( + warped_image[0, 4, 5, :], + input_image[0, 4, 4, :], + atol=1e-5, + rtol=1e-5) + + # Test that there is no flow at the corners. 
+ for i in (0, image_height - 1): + for j in (0, image_width - 1): + self.assertAllClose( + flow[0, i, j, :], np.zeros([2]), atol=1e-5, rtol=1e-5) - def load_image(self, image_file, sess): - image_op = tf.image.decode_png( + def load_image(self, image_file): + image = tf.image.decode_png( tf.io.read_file(image_file), dtype=tf.dtypes.uint8, channels=4)[:, :, 0:3] - return sess.run(image_op) + return self.evaluate(image) def testSmileyFace(self): """Check warping accuracy by comparing to hardcoded warped images.""" input_file = get_path_to_datafile( "image/test_data/Yellow_Smiley_Face.png") - with self.cached_session() as sess: - input_image = self.load_image(input_file, sess) + input_image = self.load_image(input_file) control_points = np.asarray([[64, 59], [180 - 64, 59], [39, 111], [180 - 39, 111], [90, 143], [58, 134], [180 - 58, 134]]) # pyformat: disable control_point_displacements = np.asarray([[-10.5, 10.5], [10.5, 10.5], [0, 0], [0, 0], [0, -10], [-20, 10.25], [10, 10.75]]) - control_points_op = tf.constant( + control_points = tf.constant( np.expand_dims(np.float32(control_points[:, [1, 0]]), 0)) - control_point_displacements_op = tf.constant( + control_point_displacements = tf.constant( np.expand_dims( np.float32(control_point_displacements[:, [1, 0]]), 0)) float_image = np.expand_dims(np.float32(input_image) / 255, 0) - input_image_op = tf.constant(float_image) + input_image = tf.constant(float_image) for interpolation_order in (1, 2, 3): for num_boundary_points in (0, 1, 4): - warp_op, _ = sparse_image_warp( - input_image_op, - control_points_op, - control_points_op + control_point_displacements_op, + warped_image, _ = sparse_image_warp( + input_image, + control_points, + control_points + control_point_displacements, interpolation_order=interpolation_order, num_boundary_points=num_boundary_points) - with self.cached_session() as sess: - warped_image = sess.run(warp_op) - out_image = np.uint8(warped_image[0, :, :, :] * 255) - target_file = get_path_to_datafile( - "image/test_data/Yellow_Smiley_Face_Warp-interp" + - "-{}-clamp-{}.png".format(interpolation_order, - num_boundary_points)) - - target_image = self.load_image(target_file, sess) - - # Check that the target_image and out_image difference is no - # bigger than 2 (on a scale of 0-255). Due to differences in - # floating point computation on different devices, the float - # output in warped_image may get rounded to a different int - # than that in the saved png file loaded into target_image. - self.assertAllClose( - target_image, out_image, atol=2, rtol=1e-3) + + warped_image = self.evaluate(warped_image) + out_image = np.uint8(warped_image[0, :, :, :] * 255) + target_file = get_path_to_datafile( + "image/test_data/Yellow_Smiley_Face_Warp-interp" + + "-{}-clamp-{}.png".format(interpolation_order, + num_boundary_points)) + + target_image = self.load_image(target_file) + + # Check that the target_image and out_image difference is no + # bigger than 2 (on a scale of 0-255). Due to differences in + # floating point computation on different devices, the float + # output in warped_image may get rounded to a different int + # than that in the saved png file loaded into target_image. 
+ self.assertAllClose(target_image, out_image, atol=2, rtol=1e-3) def testThatBackpropRuns(self): """Run optimization to ensure that gradients can be computed.""" - self.skipTest("TODO: port to tf2.0 / eager") batch_size = 1 image_height = 9 image_width = 12 image = tf.Variable( - np.float32( - np.random.uniform( - size=[batch_size, image_height, image_width, 3]))) + np.random.uniform(size=[batch_size, image_height, image_width, 3]), + dtype=tf.float32) control_point_locations = [[3., 3.]] control_point_locations = tf.constant( np.float32(np.expand_dims(control_point_locations, 0))) control_point_displacements = [[0.25, -0.5]] control_point_displacements = tf.constant( np.float32(np.expand_dims(control_point_displacements, 0))) - warped_image, _ = sparse_image_warp( - image, - control_point_locations, - control_point_locations + control_point_displacements, - num_boundary_points=3) - - loss = tf.reduce_mean(tf.abs(warped_image - image)) - optimizer = tf1.train.MomentumOptimizer(0.001, 0.9) - grad = tf.gradients(loss, [image]) - grad, _ = tf.clip_by_global_norm(grad, 1.0) - opt_func = optimizer.apply_gradients(zip(grad, [image])) - init_op = tf1.variables.global_variables_initializer( - ) # TODO: fix TF1 ref. - - with self.cached_session() as sess: - sess.run(init_op) - for _ in range(5): - sess.run([loss, opt_func]) + + def loss_fn(): + warped_image, _ = sparse_image_warp( + image, + control_point_locations, + control_point_locations + control_point_displacements, + num_boundary_points=3) + loss = tf.reduce_mean(tf.abs(warped_image - image)) + return loss + + optimizer = tf.keras.optimizers.SGD( + learning_rate=0.001, momentum=0.9, clipnorm=1.0) + opt_op = optimizer.minimize(loss_fn, [image]) + + self.evaluate(tf.compat.v1.global_variables_initializer()) + for _ in range(5): + self.evaluate(opt_op) if __name__ == "__main__": diff --git a/tensorflow_addons/layers/BUILD b/tensorflow_addons/layers/BUILD index 59aeb562b5..01f475a2fb 100644 --- a/tensorflow_addons/layers/BUILD +++ b/tensorflow_addons/layers/BUILD @@ -6,6 +6,7 @@ py_library( name = "layers", srcs = [ "__init__.py", + "gelu.py", "maxout.py", "normalizations.py", "optical_flow.py", @@ -23,6 +24,19 @@ py_library( ], ) +py_test( + name = "gelu_test", + size = "small", + srcs = [ + "gelu_test.py", + ], + main = "gelu_test.py", + srcs_version = "PY2AND3", + deps = [ + ":layers", + ], +) + py_test( name = "layers_wrappers_test", size = "small", diff --git a/tensorflow_addons/layers/README.md b/tensorflow_addons/layers/README.md index 4e4e4b48dc..ab28337966 100644 --- a/tensorflow_addons/layers/README.md +++ b/tensorflow_addons/layers/README.md @@ -3,6 +3,7 @@ ## Maintainers | Submodule | Maintainers | Contact Info | |:---------- |:----------- |:------------- | +| gelu | @AakashKumarNain | aakashnain@outlook.com | | maxout | | | | normalizations | @smokrow | moritz.kroeger@tu-dortmund.de | | opticalflow | | | @@ -13,6 +14,7 @@ ## Components | Submodule | Layer | Reference | |:---------- |:----------- |:------------- | +| gelu | GeLU | https://arxiv.org/abs/1606.08415 | | maxout | Maxout | https://arxiv.org/abs/1302.4389 | | normalizations | GroupNormalization | https://arxiv.org/abs/1803.08494 | | normalizations | InstanceNormalization | https://arxiv.org/abs/1607.08022 | diff --git a/tensorflow_addons/layers/__init__.py b/tensorflow_addons/layers/__init__.py index 382f2aa80e..d527e16362 100644 --- a/tensorflow_addons/layers/__init__.py +++ b/tensorflow_addons/layers/__init__.py @@ -18,10 +18,11 @@ from __future__ import division from 
__future__ import print_function +from tensorflow_addons.layers.gelu import GeLU from tensorflow_addons.layers.maxout import Maxout from tensorflow_addons.layers.normalizations import GroupNormalization from tensorflow_addons.layers.normalizations import InstanceNormalization from tensorflow_addons.layers.optical_flow import CorrelationCost from tensorflow_addons.layers.poincare import PoincareNormalize from tensorflow_addons.layers.sparsemax import Sparsemax -from tensorflow_addons.layers.wrappers import WeightNormalization +from tensorflow_addons.layers.wrappers import WeightNormalization \ No newline at end of file diff --git a/tensorflow_addons/layers/gelu.py b/tensorflow_addons/layers/gelu.py new file mode 100644 index 0000000000..159e00f729 --- /dev/null +++ b/tensorflow_addons/layers/gelu.py @@ -0,0 +1,57 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Implements GeLU activation.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf +from tensorflow_addons.utils import keras_utils +from tensorflow_addons.activations import gelu + + +@keras_utils.register_keras_custom_object +class GeLU(tf.keras.layers.Layer): + """Gaussian Error Linear Unit. + + A smoother version of ReLU generally used + in the BERT or BERT architecture based models. + Original paper: https://arxiv.org/abs/1606.08415 + + Input shape: + Arbitrary. Use the keyword argument `input_shape` + (tuple of integers, does not include the samples axis) + when using this layer as the first layer in a model. + + Output shape: + Same shape as the input. + """ + + def __init__(self, approximate=True, **kwargs): + super(GeLU, self).__init__(**kwargs) + self.approximate = approximate + self.supports_masking = True + + def call(self, inputs): + return gelu(inputs, approximate=self.approximate) + + def get_config(self): + config = {'approximate': self.approximate} + base_config = super(GeLU, self).get_config() + return dict(list(base_config.items()) + list(config.items())) + + def compute_output_shape(self, input_shape): + return input_shape diff --git a/tensorflow_addons/layers/gelu_test.py b/tensorflow_addons/layers/gelu_test.py new file mode 100644 index 0000000000..99331fb44e --- /dev/null +++ b/tensorflow_addons/layers/gelu_test.py @@ -0,0 +1,39 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
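The `GeLU` layer above is a thin Keras wrapper around the `gelu` activation from `tensorflow_addons.activations`. A minimal usage sketch, assuming the patched `tensorflow_addons` package is installed:

```python
import tensorflow as tf
from tensorflow_addons.layers import GeLU

# With approximate=True (the default) the activation is the tanh approximation
#   0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x**3)))
# from the paper referenced in the layer's docstring.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, input_shape=(16,)),
    GeLU(),
    tf.keras.layers.Dense(10),
])
print(model(tf.random.normal([2, 16])).shape)  # (2, 10)
```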
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for GeLU activation.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +import tensorflow as tf +from absl.testing import parameterized +from tensorflow_addons.layers.gelu import GeLU +from tensorflow_addons.utils import test_utils + + +@parameterized.parameters([np.float16, np.float32, np.float64]) +@test_utils.run_all_in_graph_and_eager_modes +class TestGeLU(tf.test.TestCase): + def test_random(self, dtype): + x = np.array([[0.5, 1.2, -0.3]]).astype(dtype) + val = np.array([[0.345714, 1.0617027, -0.11462909]]).astype(dtype) + test_utils.layer_test( + GeLU, kwargs={'dtype': dtype}, input_data=x, expected_output=val) + + +if __name__ == '__main__': + tf.test.main() diff --git a/tensorflow_addons/layers/optical_flow_test.py b/tensorflow_addons/layers/optical_flow_test.py index 060f572c5f..7dedd49f50 100644 --- a/tensorflow_addons/layers/optical_flow_test.py +++ b/tensorflow_addons/layers/optical_flow_test.py @@ -159,7 +159,7 @@ def _keras(self, data_format): x = [input_a, input_b] y = layer(x) - model = tf.python.keras.models.Model(x, y) + model = tf.keras.models.Model(x, y) actual_output = model.predict([val_a, val_b]) expected_output_type = 'float32' diff --git a/tensorflow_addons/layers/sparsemax_test.py b/tensorflow_addons/layers/sparsemax_test.py index 4f7367b430..7c60fab575 100644 --- a/tensorflow_addons/layers/sparsemax_test.py +++ b/tensorflow_addons/layers/sparsemax_test.py @@ -20,6 +20,7 @@ import numpy as np import tensorflow as tf +from absl.testing import parameterized from tensorflow_addons.layers import Sparsemax from tensorflow_addons.utils import test_utils @@ -50,10 +51,10 @@ def _np_sparsemax(z): return np.maximum(0, z - tau_z) -@test_utils.run_all_with_types(['float32', 'float64']) +@parameterized.parameters([np.float32, np.float64]) @test_utils.run_all_in_graph_and_eager_modes class SparsemaxTest(tf.test.TestCase): - def test_sparsemax_layer_against_numpy(self, dtype=None): + def test_sparsemax_layer_against_numpy(self, dtype): """check sparsemax kernel against numpy.""" random = np.random.RandomState(1) @@ -61,6 +62,7 @@ def test_sparsemax_layer_against_numpy(self, dtype=None): test_utils.layer_test( Sparsemax, + kwargs={'dtype': dtype}, input_data=z, expected_output=_np_sparsemax(z).astype(dtype)) diff --git a/tensorflow_addons/layers/wrappers.py b/tensorflow_addons/layers/wrappers.py index 084532e542..a5df48664e 100644 --- a/tensorflow_addons/layers/wrappers.py +++ b/tensorflow_addons/layers/wrappers.py @@ -58,7 +58,6 @@ class WeightNormalization(tf.keras.layers.Wrapper): def __init__(self, layer, data_init=True, **kwargs): super(WeightNormalization, self).__init__(layer, **kwargs) self.data_init = data_init - self._initialized = False self._track_trackable(layer, name='layer') def build(self, input_shape): @@ -69,48 +68,67 @@ def build(self, input_shape): if not self.layer.built: self.layer.build(input_shape) - if not hasattr(self.layer, 'kernel'): - raise ValueError('`WeightNormalization` must wrap a layer that' - ' contains a `kernel` for weights') + if not hasattr(self.layer, 'kernel'): + raise ValueError('`WeightNormalization` must wrap a layer that' + ' contains a `kernel` for weights') + + # The kernel's filter or unit dimension is -1 + self.layer_depth = 
int(self.layer.kernel.shape[-1]) + self.kernel_norm_axes = list(range(self.layer.kernel.shape.rank - 1)) + + self.g = self.add_variable( + name='g', + shape=(self.layer_depth,), + initializer='ones', + dtype=self.layer.kernel.dtype, + trainable=True) + self.v = self.layer.kernel + + self._initialized = self.add_variable( + name='initialized', + shape=None, + initializer='zeros', + dtype=tf.dtypes.bool, + trainable=False) - # The kernel's filter or unit dimension is -1 - self.layer_depth = int(self.layer.kernel.shape[-1]) - self.kernel_norm_axes = list( - range(self.layer.kernel.shape.rank - 1)) - - self.v = self.layer.kernel - self.g = self.add_variable( - name="g", - shape=(self.layer_depth,), - initializer=tf.keras.initializers.get('ones'), - dtype=self.layer.kernel.dtype, - trainable=True) + if self.data_init: + # Used for data initialization in self._data_dep_init. + layer_config = tf.keras.layers.serialize(self.layer) + layer_config['config']['trainable'] = False + self._naked_clone_layer = tf.keras.layers.deserialize(layer_config) + self._naked_clone_layer.build(input_shape) + self._naked_clone_layer.set_weights(self.layer.get_weights()) + self._naked_clone_layer.activation = None - super(WeightNormalization, self).build() + self.built = True - @tf.function def call(self, inputs): """Call `Layer`""" - if not self._initialized: - self._initialize_weights(inputs) - self._compute_weights() # Recompute weights for each forward pass - output = self.layer(inputs) - return output + def _do_nothing(): + return tf.identity(self.g) - def compute_output_shape(self, input_shape): - return tf.TensorShape( - self.layer.compute_output_shape(input_shape).as_list()) + def _update_weights(): + # Ensure we read `self.g` after _update_weights. + with tf.control_dependencies(self._initialize_weights(inputs)): + return tf.identity(self.g) - def _compute_weights(self): - """Generate normalized weights. + g = tf.cond(self._initialized, _do_nothing, _update_weights) - This method will update the value of self.layer.kernel with the - normalized value, so that the layer is ready for call(). - """ with tf.name_scope('compute_weights'): + # Replace kernel by normalized weight variable. self.layer.kernel = tf.nn.l2_normalize( - self.v, axis=self.kernel_norm_axes) * self.g + self.v, axis=self.kernel_norm_axes) * g + + # Ensure we calculate result after updating kernel. + update_kernel = tf.identity(self.layer.kernel) + with tf.control_dependencies([update_kernel]): + outputs = self.layer(inputs) + return outputs + + def compute_output_shape(self, input_shape): + return tf.TensorShape( + self.layer.compute_output_shape(input_shape).as_list()) def _initialize_weights(self, inputs): """Initialize weight g. @@ -118,36 +136,43 @@ def _initialize_weights(self, inputs): The initial value of g could either from the initial value in v, or by the input value if self.data_init is True. 
""" - if self.data_init: - self._data_dep_init(inputs) - else: - self._init_norm() - self._initialized = True + with tf.control_dependencies([ + tf.debugging.assert_equal( # pylint: disable=bad-continuation + self._initialized, + False, + message='The layer has been initialized.') + ]): + if self.data_init: + assign_tensors = self._data_dep_init(inputs) + else: + assign_tensors = self._init_norm() + assign_tensors.append(self._initialized.assign(True)) + return assign_tensors def _init_norm(self): """Set the weight g with the norm of the weight vector.""" with tf.name_scope('init_norm'): - flat = tf.reshape(self.v, [-1, self.layer_depth]) - self.g.assign( - tf.reshape(tf.linalg.norm(flat, axis=0), (self.layer_depth,))) + v_flat = tf.reshape(self.v, [-1, self.layer_depth]) + v_norm = tf.linalg.norm(v_flat, axis=0) + g_tensor = self.g.assign(tf.reshape(v_norm, (self.layer_depth,))) + return [g_tensor] def _data_dep_init(self, inputs): """Data dependent initialization.""" - with tf.name_scope('data_dep_init'): # Generate data dependent init values - existing_activation = self.layer.activation - self.layer.activation = None - x_init = self.layer(inputs) + x_init = self._naked_clone_layer(inputs) data_norm_axes = list(range(x_init.shape.rank - 1)) m_init, v_init = tf.nn.moments(x_init, data_norm_axes) scale_init = 1. / tf.math.sqrt(v_init + 1e-10) - # Assign data dependent init values - self.g = self.g * scale_init - if hasattr(self.layer, 'bias'): - self.layer.bias = -m_init * scale_init - self.layer.activation = existing_activation + # Assign data dependent init values + g_tensor = self.g.assign(self.g * scale_init) + if hasattr(self.layer, 'bias'): + bias_tensor = self.layer.bias.assign(-m_init * scale_init) + return [g_tensor, bias_tensor] + else: + return [g_tensor] def get_config(self): config = {'data_init': self.data_init} diff --git a/tensorflow_addons/layers/wrappers_test.py b/tensorflow_addons/layers/wrappers_test.py index 9d83bbec50..b4bdb9c494 100644 --- a/tensorflow_addons/layers/wrappers_test.py +++ b/tensorflow_addons/layers/wrappers_test.py @@ -26,82 +26,52 @@ @test_utils.run_all_in_graph_and_eager_modes class WeightNormalizationTest(tf.test.TestCase): - def test_weightnorm_dense_train(self): - model = tf.keras.models.Sequential() - model.add( - wrappers.WeightNormalization( - tf.keras.layers.Dense(2), input_shape=(3, 4))) - model.compile( - optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001), - loss='mse') - model.fit( - np.random.random((10, 3, 4)), - np.random.random((10, 3, 2)), - epochs=3, - batch_size=10) - self.assertTrue(hasattr(model.layers[0], 'g')) - - def test_weightnorm_dense_train_notinit(self): - model = tf.keras.models.Sequential() - model.add( - wrappers.WeightNormalization( - tf.keras.layers.Dense(2), input_shape=(3, 4), data_init=False)) - - model.compile( - optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001), - loss='mse') - model.fit( - np.random.random((10, 3, 4)), - np.random.random((10, 3, 2)), - epochs=3, - batch_size=10) - self.assertTrue(hasattr(model.layers[0], 'g')) - - def test_weightnorm_conv2d(self): - model = tf.keras.models.Sequential() - model.add( - wrappers.WeightNormalization( - tf.keras.layers.Conv2D(5, (2, 2), padding='same'), - input_shape=(4, 4, 3))) - - model.add(tf.keras.layers.Activation('relu')) - model.compile( - optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001), - loss='mse') - model.fit( - np.random.random((2, 4, 4, 3)), - np.random.random((2, 4, 4, 5)), - epochs=3, - batch_size=10) - - 
self.assertTrue(hasattr(model.layers[0], 'g')) - - def test_weightnorm_applylayer(self): - images = tf.random.uniform((2, 4, 4, 3)) - wn_wrapper = wrappers.WeightNormalization( - tf.keras.layers.Conv2D(32, [2, 2]), input_shape=(4, 4, 3)) - wn_wrapper.apply(images) - self.assertTrue(hasattr(wn_wrapper, 'g')) - - def test_weightnorm_nonlayer(self): - images = tf.random.uniform((2, 4, 43)) - with self.assertRaises(AssertionError): - wrappers.WeightNormalization(images) - - def test_weightnorm_nokernel(self): - with self.assertRaises(ValueError): - wrappers.WeightNormalization(tf.keras.layers.MaxPooling2D( - 2, 2)).build((2, 2)) - - def test_weightnorm_keras(self): - input_data = np.random.random((10, 3, 4)).astype(np.float32) + def test_weightnorm(self): + test_utils.layer_test( + wrappers.WeightNormalization, + kwargs={ + 'layer': tf.keras.layers.Conv2D(5, (2, 2)), + }, + input_shape=(2, 4, 4, 3)) + + def _check_data_init(self, data_init, input_data, expected_output): + layer = tf.keras.layers.Dense( + input_data.shape[-1], + activation=None, + kernel_initializer='identity', + bias_initializer='zeros') test_utils.layer_test( wrappers.WeightNormalization, kwargs={ - 'layer': tf.keras.layers.Dense(2), - 'input_shape': (3, 4) + 'layer': layer, + 'data_init': data_init, }, - input_data=input_data) + input_data=input_data, + expected_output=expected_output) + + def test_weightnorm_with_data_init_is_false(self): + input_data = np.array([[[-4, -4], [4, 4]]], dtype=np.float32) + self._check_data_init( + data_init=False, input_data=input_data, expected_output=input_data) + + def test_weightnorm_with_data_init_is_true(self): + input_data = np.array([[[-4, -4], [4, 4]]], dtype=np.float32) + self._check_data_init( + data_init=True, + input_data=input_data, + expected_output=input_data / 4) + + def test_weightnorm_non_layer(self): + images = tf.random.uniform((2, 4, 43)) + with self.assertRaises(AssertionError): + wrappers.WeightNormalization(images) + + def test_weightnorm_non_kernel_layer(self): + images = tf.random.uniform((2, 2, 2)) + with self.assertRaisesRegexp(ValueError, 'contains a `kernel`'): + non_kernel_layer = tf.keras.layers.MaxPooling2D(2, 2) + wn_wrapper = wrappers.WeightNormalization(non_kernel_layer) + wn_wrapper(images) if __name__ == "__main__": diff --git a/tensorflow_addons/losses/README.md b/tensorflow_addons/losses/README.md index e0951d41c1..c1ed11c35d 100644 --- a/tensorflow_addons/losses/README.md +++ b/tensorflow_addons/losses/README.md @@ -17,6 +17,7 @@ | focal_loss | SigmoidFocalCrossEntropy | https://arxiv.org/abs/1708.02002 | | lifted | LiftedStructLoss | https://arxiv.org/abs/1511.06452 | | npairs | NpairsLoss | http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf | +| npairs | NpairsMultilabelLoss | http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf | | sparsemax_loss | SparsemaxLoss | https://arxiv.org/abs/1602.02068 | | triplet | TripletSemiHardLoss | https://arxiv.org/abs/1503.03832 | diff --git a/tensorflow_addons/losses/__init__.py b/tensorflow_addons/losses/__init__.py index ce94d7b91e..ff8e5094fa 100644 --- a/tensorflow_addons/losses/__init__.py +++ b/tensorflow_addons/losses/__init__.py @@ -21,6 +21,6 @@ from tensorflow_addons.losses.contrastive import contrastive_loss, ContrastiveLoss from tensorflow_addons.losses.focal_loss import sigmoid_focal_crossentropy, SigmoidFocalCrossEntropy from tensorflow_addons.losses.lifted import lifted_struct_loss, 
LiftedStructLoss -from tensorflow_addons.losses.npairs import npairs_loss, NpairsLoss +from tensorflow_addons.losses.npairs import npairs_loss, NpairsLoss, npairs_multilabel_loss, NpairsMultilabelLoss from tensorflow_addons.losses.sparsemax_loss import sparsemax_loss, SparsemaxLoss from tensorflow_addons.losses.triplet import triplet_semihard_loss, TripletSemiHardLoss diff --git a/tensorflow_addons/losses/npairs.py b/tensorflow_addons/losses/npairs.py index adba81566e..319fba0fc9 100644 --- a/tensorflow_addons/losses/npairs.py +++ b/tensorflow_addons/losses/npairs.py @@ -64,6 +64,71 @@ def npairs_loss(y_true, y_pred): return tf.math.reduce_mean(loss) +@keras_utils.register_keras_custom_object +@tf.function +def npairs_multilabel_loss(y_true, y_pred): + """Computes the npairs loss between multilabel data `y_true` and `y_pred`. + + Npairs loss expects paired data where a pair is composed of samples from + the same labels and each pairs in the minibatch have different labels. + The loss takes each row of the pair-wise similarity matrix, `y_pred`, + as logits and the remapped multi-class labels, `y_true`, as labels. + + To deal with multilabel inputs, the count of label intersection + is computed as follows: + + ``` + L_{i,j} = | set_of_labels_for(i) \cap set_of_labels_for(j) | + ``` + + Each row of the count based label matrix is further normalized so that + each row sums to one. + + `y_true` should be a binary indicator for classes. + That is, if `y_true[i, j] = 1`, then `i`th sample is in `j`th class; + if `y_true[i, j] = 0`, then `i`th sample is not in `j`th class. + + The similarity matrix `y_pred` between two embedding matrices `a` and `b` + with shape `[batch_size, hidden_size]` can be computed as follows: + + ```python + # y_pred = a * b^T + y_pred = tf.matmul(a, b, transpose_a=False, transpose_b=True) + ``` + + See: http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf + + Args: + y_true: Either 2-D integer `Tensor` with shape + `[batch_size, num_classes]`, or `SparseTensor` with dense shape + `[batch_size, num_classes]`. If `y_true` is a `SparseTensor`, then + it will be converted to `Tensor` via `tf.sparse.to_dense` first. + + y_pred: 2-D float `Tensor` with shape `[batch_size, batch_size]` of + similarity matrix between embedding matrices. + + Returns: + npairs_multilabel_loss: float scalar. + """ + y_pred = tf.convert_to_tensor(y_pred) + y_true = tf.cast(y_true, y_pred.dtype) + + # Convert to dense tensor if `y_true` is a `SparseTensor` + if isinstance(y_true, tf.SparseTensor): + y_true = tf.sparse.to_dense(y_true) + + # Enable efficient multiplication because y_true contains lots of zeros + # https://www.tensorflow.org/api_docs/python/tf/linalg/matmul + y_true = tf.linalg.matmul( + y_true, y_true, transpose_b=True, a_is_sparse=True, b_is_sparse=True) + y_true /= tf.math.reduce_sum(y_true, 1, keepdims=True) + + loss = tf.nn.softmax_cross_entropy_with_logits( + logits=y_pred, labels=y_true) + + return tf.math.reduce_mean(loss) + + @keras_utils.register_keras_custom_object class NpairsLoss(tf.keras.losses.Loss): """Computes the npairs loss between `y_true` and `y_pred`. @@ -93,3 +158,48 @@ def __init__(self, name="npairs_loss"): def call(self, y_true, y_pred): return npairs_loss(y_true, y_pred) + + +@keras_utils.register_keras_custom_object +class NpairsMultilabelLoss(tf.keras.losses.Loss): + """Computes the npairs loss between multilabel data `y_true` and `y_pred`. 
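`npairs_multilabel_loss` above builds its soft targets from the binary indicator matrix: pairwise label-intersection counts via `y_true @ y_true^T`, then row normalization, then softmax cross entropy against each row of the similarity matrix. A small sketch of just that label transformation, with toy indicators rather than the values used in the tests:

```python
import tensorflow as tf

# Toy multilabel indicators for a batch of 3 samples over 4 classes.
y_true = tf.constant([[1., 1., 0., 0.],
                      [0., 1., 1., 0.],
                      [0., 0., 0., 1.]])

# Pairwise intersection counts: L[i, j] = | set_of_labels_for(i) n set_of_labels_for(j) |.
intersections = tf.linalg.matmul(y_true, y_true, transpose_b=True)
# Row-normalize so each row is a distribution over the batch.
labels = intersections / tf.math.reduce_sum(intersections, 1, keepdims=True)
print(labels.numpy())
# [[0.667 0.333 0.   ]
#  [0.333 0.667 0.   ]
#  [0.    0.    1.   ]]

# These rows are then used as the soft targets for
# tf.nn.softmax_cross_entropy_with_logits against each row of the pairwise
# similarity matrix y_pred.
```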
+ + Npairs loss expects paired data where a pair is composed of samples from + the same labels and each pairs in the minibatch have different labels. + The loss takes each row of the pair-wise similarity matrix, `y_pred`, + as logits and the remapped multi-class labels, `y_true`, as labels. + + To deal with multilabel inputs, the count of label intersection + is computed as follows: + + ``` + L_{i,j} = | set_of_labels_for(i) \cap set_of_labels_for(j) | + ``` + + Each row of the count based label matrix is further normalized so that + each row sums to one. + + `y_true` should be a binary indicator for classes. + That is, if `y_true[i, j] = 1`, then `i`th sample is in `j`th class; + if `y_true[i, j] = 0`, then `i`th sample is not in `j`th class. + + The similarity matrix `y_pred` between two embedding matrices `a` and `b` + with shape `[batch_size, hidden_size]` can be computed as follows: + + ```python + # y_pred = a * b^T + y_pred = tf.matmul(a, b, transpose_a=False, transpose_b=True) + ``` + + See: http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf + + Args: + name: (Optional) name for the loss. + """ + + def __init__(self, name="npairs_multilabel_loss"): + super(NpairsMultilabelLoss, self).__init__( + reduction=tf.keras.losses.Reduction.NONE, name=name) + + def call(self, y_true, y_pred): + return npairs_multilabel_loss(y_true, y_pred) diff --git a/tensorflow_addons/losses/npairs_test.py b/tensorflow_addons/losses/npairs_test.py index 0f0ecc12b3..043c7d983d 100644 --- a/tensorflow_addons/losses/npairs_test.py +++ b/tensorflow_addons/losses/npairs_test.py @@ -54,5 +54,84 @@ def test_unweighted(self): self.assertAllClose(loss, 0.253856) +@test_utils.run_all_in_graph_and_eager_modes +class NpairsMultilabelLossTest(tf.test.TestCase): + def config(self): + nml_obj = npairs.NpairsMultilabelLoss(name="nml") + self.assertEqual(nml_obj.name, "nml") + self.assertEqual(nml_obj.reduction, tf.keras.losses.Reduction.NONE) + + def test_single_label(self): + """Test single label, which is the same with `NpairsLoss`.""" + nml_obj = npairs.NpairsMultilabelLoss() + # batch size = 4, hidden size = 2 + y_true = tf.constant( + [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], + dtype=tf.int64) + # features of anchors + f = tf.constant([[1., 1.], [1., -1.], [-1., 1.], [-1., -1.]], + dtype=tf.float32) + # features of positive samples + fp = tf.constant([[1., 1.], [1., -1.], [-1., 1.], [-1., -1.]], + dtype=tf.float32) + # similarity matrix + y_pred = tf.matmul(f, fp, transpose_a=False, transpose_b=True) + loss = nml_obj(y_true, y_pred) + + # Loss = 1/4 * \sum_i log(1 + \sum_{j != i} exp(f_i*fp_j^T-f_i*f_i^T)) + # Compute loss for i = 0, 1, 2, 3 without multiplier 1/4 + # i = 0 => log(1 + sum([exp(-2), exp(-2), exp(-4)])) = 0.253846 + # i = 1 => log(1 + sum([exp(-2), exp(-4), exp(-2)])) = 0.253846 + # i = 2 => log(1 + sum([exp(-2), exp(-4), exp(-2)])) = 0.253846 + # i = 3 => log(1 + sum([exp(-4), exp(-2), exp(-2)])) = 0.253846 + # Loss = (0.253856 + 0.253856 + 0.253856 + 0.253856) / 4 = 0.253856 + + self.assertAllClose(loss, 0.253856) + + # Test sparse tensor + y_true = tf.sparse.from_dense(y_true) + loss = nml_obj(y_true, y_pred) + self.assertAllClose(loss, 0.253856) + + def test_multilabel(self): + nml_obj = npairs.NpairsMultilabelLoss() + # batch size = 4, hidden size = 2 + y_true = tf.constant( + [[1, 1, 0, 0], [0, 1, 1, 0], [0, 0, 1, 1], [0, 0, 0, 1]], + dtype=tf.int64) + # features of anchors + f = tf.constant([[1., 1.], [1., -1.], [-1., 
1.], [-1., -1.]], + dtype=tf.float32) + # features of positive samples + fp = tf.constant([[1., 1.], [1., -1.], [-1., 1.], [-1., -1.]], + dtype=tf.float32) + # similarity matrix + y_pred = tf.matmul(f, fp, transpose_a=False, transpose_b=True) + loss = nml_obj(y_true, y_pred) + + # Loss = \sum_i log(1 + \sum_{j != i} exp(f_i*fp_j^T-f_i*f_i^T)) + # Because of multilabel, the label matrix is normalized so that each + # row sums to one. That's why the multiplier before log exists. + # Compute loss for i = 0, 1, 2, 3 without multiplier 1/4 + # i = 0 => 2/3 * log(1 + sum([exp(-2), exp(-2), exp(-4)])) + + # 1/3 * log(1 + sum([exp(2) , exp(0) , exp(-2)])) = 0.920522 + # i = 1 => 1/4 * log(1 + sum([exp(2) , exp(-2), exp(0) ])) + + # 1/2 * log(1 + sum([exp(-2), exp(-4), exp(-2)])) + + # 1/4 * log(1 + sum([exp(2) , exp(4) , exp(2) ])) = 1.753856 + # i = 2 => 1/4 * log(1 + sum([exp(2) , exp(4) , exp(2) ])) + + # 1/2 * log(1 + sum([exp(-2), exp(-4), exp(-2)])) + + # 1/4 * log(1 + sum([exp(0) , exp(-2), exp(2) ])) = 1.753856 + # i = 4 => 1/2 * log(1 + sum([exp(-2), exp(0) , exp(2) ])) + + # 1/2 * log(1 + sum([exp(-4), exp(-2), exp(-2)])) = 1.253856 + # Loss = (0.920522 + 1.753856 + 1.753856 + 1.253856) / 4 = 1.420522 + + self.assertAllClose(loss, 1.420522) + + # Test sparse tensor + y_true = tf.sparse.from_dense(y_true) + loss = nml_obj(y_true, y_pred) + self.assertAllClose(loss, 1.420522) + + if __name__ == "__main__": tf.test.main() diff --git a/tensorflow_addons/losses/sparsemax_loss.py b/tensorflow_addons/losses/sparsemax_loss.py index 82e175ad80..6d6f0e1364 100644 --- a/tensorflow_addons/losses/sparsemax_loss.py +++ b/tensorflow_addons/losses/sparsemax_loss.py @@ -23,8 +23,8 @@ from tensorflow_addons.utils import keras_utils -@tf.function @keras_utils.register_keras_custom_object +@tf.function def sparsemax_loss(logits, sparsemax, labels, name=None): """Sparsemax loss function [1]. diff --git a/tensorflow_addons/losses/sparsemax_loss_test.py b/tensorflow_addons/losses/sparsemax_loss_test.py index b29f70cb8b..6788480951 100644 --- a/tensorflow_addons/losses/sparsemax_loss_test.py +++ b/tensorflow_addons/losses/sparsemax_loss_test.py @@ -226,6 +226,12 @@ def test_gradient_against_estimate(self, dtype=None): lambda logits: sparsemax_loss(logits, sparsemax(logits), q), [z]) self.assertAllCloseAccordingToType(jacob_sym, jacob_num) + def test_serialization(self, dtype=None): + ref_fn = sparsemax_loss + config = tf.keras.losses.serialize(ref_fn) + fn = tf.keras.losses.deserialize(config) + self.assertEqual(ref_fn, fn) + if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/metrics/cohens_kappa.py b/tensorflow_addons/metrics/cohens_kappa.py index 477cee1330..40a7680922 100644 --- a/tensorflow_addons/metrics/cohens_kappa.py +++ b/tensorflow_addons/metrics/cohens_kappa.py @@ -41,15 +41,21 @@ class CohenKappa(Metric): ```python actuals = np.array([4, 4, 3, 4, 2, 4, 1, 1], dtype=np.int32) preds = np.array([4, 4, 3, 4, 4, 2, 1, 1], dtype=np.int32) - - m = tf.keras.metrics.CohenKappa(num_classes=5) - m.update_state(actuals, preds, "quadratic") - print('Final result: ', m.result().numpy()) # Result: 0.68932 + weights = np.array([1, 1, 2, 5, 10, 2, 3, 3], dtype=np.int32) + + m = tfa.metrics.CohenKappa(num_classes=5) + m.update_state(actuals, preds) + print('Final result: ', m.result().numpy()) # Result: 0.61904764 + + # To use this with weights, sample_weight argument can be used. 
+ m = tfa.metrics.CohenKappa(num_classes=5) + m.update_state(actuals, preds, sample_weight=weights) + print('Final result: ', m.result().numpy()) # Result: 0.37209308 ``` Usage with tf.keras API: ```python model = keras.models.Model(inputs, outputs) - model.add_metric(tf.keras.metrics.CohenKappa(num_classes=5)(outputs)) + model.add_metric(tfa.metrics.CohenKappa(num_classes=5)(outputs)) model.compile('sgd', loss='mse') ``` @@ -180,4 +186,4 @@ def reset_states(self): for v in self.variables: K.set_value( - v, np.zeros((self.num_classes, self.num_classes), np.int32)) \ No newline at end of file + v, np.zeros((self.num_classes, self.num_classes), np.int32)) diff --git a/tensorflow_addons/metrics/f1_test.py b/tensorflow_addons/metrics/f1_test.py index 9ea1ccd2e1..e11165bb2c 100755 --- a/tensorflow_addons/metrics/f1_test.py +++ b/tensorflow_addons/metrics/f1_test.py @@ -21,6 +21,8 @@ import tensorflow as tf from tensorflow_addons.metrics import F1Score from tensorflow_addons.utils import test_utils +from tensorflow.keras import layers +import numpy as np @test_utils.run_all_in_graph_and_eager_modes @@ -106,6 +108,23 @@ def test_f1_random_score_none(self): for avg, res in test_params: self._test_f1(avg, actuals, preds, res) + # Keras model check + def test_keras_model(self): + model = tf.keras.Sequential() + model.add(layers.Dense(64, activation='relu')) + model.add(layers.Dense(64, activation='relu')) + model.add(layers.Dense(1, activation='softmax')) + fb = F1Score(1, 'macro') + model.compile( + optimizer='rmsprop', + loss='categorical_crossentropy', + metrics=['acc', fb]) + # data preparation + data = np.random.random((10, 3)) + labels = np.random.random((10, 1)) + labels = np.where(labels > 0.5, 1, 0) + model.fit(data, labels, epochs=1, batch_size=32, verbose=0) + if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/metrics/f_scores.py b/tensorflow_addons/metrics/f_scores.py index c6495528a9..5c7b283894 100755 --- a/tensorflow_addons/metrics/f_scores.py +++ b/tensorflow_addons/metrics/f_scores.py @@ -175,7 +175,9 @@ def __init__(self, initializer='zeros', dtype=self.dtype) - def update_state(self, y_true, y_pred): + # TODO: Add sample_weight support, currently it is + # ignored during calculations. + def update_state(self, y_true, y_pred, sample_weight=None): y_true = tf.cast(y_true, tf.int32) y_pred = tf.cast(y_pred, tf.int32) @@ -338,6 +340,8 @@ def __init__(self, num_classes, average, name='f1_score', super(F1Score, self).__init__( num_classes, average, 1.0, name=name, dtype=dtype) + # TODO: Add sample_weight support, currently it is + # ignored during calculations. 
def get_config(self): base_config = super(F1Score, self).get_config() del base_config["beta"] diff --git a/tensorflow_addons/metrics/fbeta_test.py b/tensorflow_addons/metrics/fbeta_test.py index d035c3de18..69a5b730d1 100644 --- a/tensorflow_addons/metrics/fbeta_test.py +++ b/tensorflow_addons/metrics/fbeta_test.py @@ -21,6 +21,8 @@ import tensorflow as tf from tensorflow_addons.metrics import FBetaScore from tensorflow_addons.utils import test_utils +from tensorflow.keras import layers +import numpy as np @test_utils.run_all_in_graph_and_eager_modes @@ -112,6 +114,23 @@ def test_fbeta_random_score_none(self): for beta, res in test_params: self._test_fbeta(None, beta, actuals, preds, res) + # Keras model check + def test_keras_model(self): + model = tf.keras.Sequential() + model.add(layers.Dense(64, activation='relu')) + model.add(layers.Dense(64, activation='relu')) + model.add(layers.Dense(1, activation='softmax')) + fb = FBetaScore(1, 'macro') + model.compile( + optimizer='rmsprop', + loss='categorical_crossentropy', + metrics=['acc', fb]) + # data preparation + data = np.random.random((10, 3)) + labels = np.random.random((10, 1)) + labels = np.where(labels > 0.5, 1, 0) + model.fit(data, labels, epochs=1, batch_size=32, verbose=0) + if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/optimizers/BUILD b/tensorflow_addons/optimizers/BUILD old mode 100644 new mode 100755 diff --git a/tensorflow_addons/optimizers/README.md b/tensorflow_addons/optimizers/README.md old mode 100644 new mode 100755 diff --git a/tensorflow_addons/optimizers/__init__.py b/tensorflow_addons/optimizers/__init__.py old mode 100644 new mode 100755 diff --git a/tensorflow_addons/optimizers/lazy_adam_test.py b/tensorflow_addons/optimizers/lazy_adam_test.py index cea6484df5..b9ed5f8860 100644 --- a/tensorflow_addons/optimizers/lazy_adam_test.py +++ b/tensorflow_addons/optimizers/lazy_adam_test.py @@ -322,7 +322,7 @@ def testSlotsUniqueEager(self): opt = lazy_adam.LazyAdam(1.) opt.minimize(lambda: v1 + v2, var_list=[v1, v2]) # There should be iteration, and two unique slot variables for v1 and v2. - self.assertEqual(5, len(set(opt.variables()))) + self.assertEqual(5, len(opt.variables())) self.assertEqual( self.evaluate(opt.variables()[0]), self.evaluate(opt.iterations)) diff --git a/tensorflow_addons/rnn/cell_test.py b/tensorflow_addons/rnn/cell_test.py index bfc7146aa0..82d09260e8 100644 --- a/tensorflow_addons/rnn/cell_test.py +++ b/tensorflow_addons/rnn/cell_test.py @@ -156,7 +156,7 @@ def test_config(self): cell = rnn_cell.NASCell(10, projection=5, use_bias=True) expected_config = { - "dtype": None, + "dtype": "float32", "name": "nas_cell", "trainable": True, "units": 10, @@ -241,7 +241,7 @@ def test_config(self): cell = rnn_cell.LayerNormLSTMCell(10) expected_config = { - "dtype": None, + "dtype": "float32", "name": "layer_norm_lstm_cell", "trainable": True, "units": 10, diff --git a/tensorflow_addons/seq2seq/attention_wrapper.py b/tensorflow_addons/seq2seq/attention_wrapper.py index aa8b38a6b6..275245fded 100644 --- a/tensorflow_addons/seq2seq/attention_wrapper.py +++ b/tensorflow_addons/seq2seq/attention_wrapper.py @@ -1538,7 +1538,8 @@ def __init__(self, initial_cell_state=None, name=None, attention_layer=None, - attention_fn=None): + attention_fn=None, + **kwargs): """Construct the `AttentionWrapper`. **NOTE** If you are using the `BeamSearchDecoder` with a cell wrapped @@ -1619,6 +1620,7 @@ def __init__(self, attention_layer) and outputs (attention, alignments, next_attention_state). 
If provided, the attention_layer_size should be the size of the outputs of attention_fn. + **kwargs: Other keyword arguments for layer creation. Raises: TypeError: `attention_layer_size` is not None and @@ -1629,7 +1631,7 @@ def __init__(self, of `attention_layer_size`; if `attention_layer_size` and `attention_layer` are set simultaneously. """ - super(AttentionWrapper, self).__init__(name=name) + super(AttentionWrapper, self).__init__(name=name, **kwargs) rnn_cell_impl.assert_like_rnncell("cell", cell) if isinstance(attention_mechanism, (list, tuple)): self._is_multi = True diff --git a/tensorflow_addons/seq2seq/attention_wrapper_test.py b/tensorflow_addons/seq2seq/attention_wrapper_test.py index 5b4e724187..b9adefbe82 100644 --- a/tensorflow_addons/seq2seq/attention_wrapper_test.py +++ b/tensorflow_addons/seq2seq/attention_wrapper_test.py @@ -28,10 +28,6 @@ from tensorflow_addons.seq2seq import basic_decoder from tensorflow_addons.seq2seq import sampler as sampler_py -# TODO: Find public API alternatives to these -from tensorflow.python import keras -from tensorflow.python.keras import initializers - @test_utils.run_all_in_graph_and_eager_modes class AttentionMechanismTest(tf.test.TestCase, parameterized.TestCase): @@ -131,22 +127,22 @@ def test_passing_memory_from_call(self, attention_cls): def test_save_load_layer(self, attention_cls): vocab = 20 embedding_dim = 6 - inputs = keras.layers.Input(shape=[self.timestep]) - encoder_input = keras.layers.Embedding( + inputs = tf.keras.Input(shape=[self.timestep]) + encoder_input = tf.keras.layers.Embedding( vocab, embedding_dim, mask_zero=True)(inputs) - encoder_output = keras.layers.LSTM( + encoder_output = tf.keras.layers.LSTM( self.memory_size, return_sequences=True)(encoder_input) attention = attention_cls(self.units, encoder_output) - query = keras.layers.Input(shape=[self.units]) - state = keras.layers.Input(shape=[self.timestep]) + query = tf.keras.Input(shape=[self.units]) + state = tf.keras.Input(shape=[self.timestep]) score = attention([query, state]) x = np.random.randint(vocab, size=(self.batch, self.timestep)) x_test = np.random.randint(vocab, size=(self.batch, self.timestep)) y = np.random.randn(self.batch, self.timestep) - model = keras.models.Model([inputs, query, state], score) + model = tf.keras.Model([inputs, query, state], score) # Fall back to v1 style Keras training loop until issue with # using outputs of a layer in another layer's constructor. model.compile("rmsprop", "mse", experimental_run_tf_function=False) @@ -155,7 +151,7 @@ def test_save_load_layer(self, attention_cls): config = model.get_config() weights = model.get_weights() - loaded_model = keras.models.Model.from_config( + loaded_model = tf.keras.Model.from_config( config, custom_objects={attention_cls.__name__: attention_cls}) loaded_model.set_weights(weights) @@ -337,11 +333,12 @@ def _testWithMaybeMultiAttention(self, # Create a memory layer with deterministic initializer to avoid # randomness in the test between graph and eager. 
if create_query_layer: - create_attention_kwargs["query_layer"] = keras.layers.Dense( + create_attention_kwargs["query_layer"] = tf.keras.layers.Dense( depth, kernel_initializer="ones", use_bias=False) if create_memory_layer: - create_attention_kwargs["memory_layer"] = keras.layers.Dense( - depth, kernel_initializer="ones", use_bias=False) + create_attention_kwargs["memory_layer"] = ( + tf.keras.layers.Dense( + depth, kernel_initializer="ones", use_bias=False)) attention_mechanisms.append( creator( @@ -358,7 +355,7 @@ def _testWithMaybeMultiAttention(self, attention_layer_size = attention_layer_size[0] if attention_layer is not None: attention_layer = attention_layer[0] - cell = keras.layers.LSTMCell( + cell = tf.keras.layers.LSTMCell( cell_depth, recurrent_activation="sigmoid", kernel_initializer="ones", @@ -371,8 +368,9 @@ def _testWithMaybeMultiAttention(self, attention_layer=attention_layer) if cell._attention_layers is not None: for layer in cell._attention_layers: - layer.kernel_initializer = initializers.glorot_uniform( - seed=1337) + layer.kernel_initializer = ( + tf.compat.v1.keras.initializers.glorot_uniform( + seed=1337)) sampler = sampler_py.TrainingSampler() my_decoder = basic_decoder.BasicDecoder(cell=cell, sampler=sampler) @@ -476,12 +474,13 @@ def testBahdanauNormalizedDType(self, dtype): memory_sequence_length=self.encoder_sequence_length, normalize=True, dtype=dtype) - cell = keras.layers.LSTMCell( - self.units, recurrent_activation="sigmoid") - cell = wrapper.AttentionWrapper(cell, attention_mechanism) + cell = tf.keras.layers.LSTMCell( + self.units, recurrent_activation="sigmoid", dtype=dtype) + cell = wrapper.AttentionWrapper(cell, attention_mechanism, dtype=dtype) sampler = sampler_py.TrainingSampler() - my_decoder = basic_decoder.BasicDecoder(cell=cell, sampler=sampler) + my_decoder = basic_decoder.BasicDecoder( + cell=cell, sampler=sampler, dtype=dtype) final_outputs, final_state, _ = my_decoder( decoder_inputs, @@ -504,12 +503,13 @@ def testLuongScaledDType(self, dtype): scale=True, dtype=dtype, ) - cell = keras.layers.LSTMCell( - self.units, recurrent_activation="sigmoid") - cell = wrapper.AttentionWrapper(cell, attention_mechanism) + cell = tf.keras.layers.LSTMCell( + self.units, recurrent_activation="sigmoid", dtype=dtype) + cell = wrapper.AttentionWrapper(cell, attention_mechanism, dtype=dtype) sampler = sampler_py.TrainingSampler() - my_decoder = basic_decoder.BasicDecoder(cell=cell, sampler=sampler) + my_decoder = basic_decoder.BasicDecoder( + cell=cell, sampler=sampler, dtype=dtype) final_outputs, final_state, _ = my_decoder( decoder_inputs, diff --git a/tensorflow_addons/text/crf.py b/tensorflow_addons/text/crf.py index d8d97bf216..b20b0476ae 100644 --- a/tensorflow_addons/text/crf.py +++ b/tensorflow_addons/text/crf.py @@ -188,8 +188,9 @@ def crf_log_likelihood(inputs, # Get the transition matrix if not provided. 
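For the `crf.py` hunk here: the patch swaps `tf.get_variable` for a `tf.Variable` seeded from `tf.keras.initializers.GlorotUniform`. A standalone sketch of that pattern (`num_tags` is an arbitrary example value); note that `tf.Variable`'s second positional argument is `trainable`, not `name`, so the variable name is safest passed as a keyword:

```python
import tensorflow as tf

# TF2 replacement for tf.get_variable("transitions", [num_tags, num_tags]):
# draw the initial value from a Keras initializer and wrap it in a Variable.
num_tags = 5
initializer = tf.keras.initializers.GlorotUniform()
transition_params = tf.Variable(
    initializer([num_tags, num_tags]), name="transitions")
print(transition_params.shape)  # (5, 5)
```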
if transition_params is None: - transition_params = tf.get_variable("transitions", - [num_tags, num_tags]) + initializer = tf.keras.initializers.GlorotUniform() + transition_params = tf.Variable( + initializer([num_tags, num_tags]), "transitions") sequence_scores = crf_sequence_score(inputs, tag_indices, sequence_lengths, transition_params) diff --git a/tensorflow_addons/text/crf_test.py b/tensorflow_addons/text/crf_test.py index 84c09b539b..1c76d0b0ec 100644 --- a/tensorflow_addons/text/crf_test.py +++ b/tensorflow_addons/text/crf_test.py @@ -229,6 +229,12 @@ def testCrfLogLikelihood(self): tf_total_log_likelihood = self.evaluate(total_log_likelihood) self.assertAllClose(tf_total_log_likelihood, 0.0) + # check if `transition_params = None` raises an error + text.crf_log_likelihood( + inputs=tf.expand_dims(inputs, 0), + tag_indices=tf.expand_dims(tag_indices, 0), + sequence_lengths=tf.expand_dims(sequence_lengths, 0)) + def testViterbiDecode(self): inputs = np.array([[4, 5, -3], [3, -1, 3], [-1, 2, 1], [0, 0, 0]], dtype=np.float32) diff --git a/tensorflow_addons/version.py b/tensorflow_addons/version.py index 32892f5d79..3ed34d31fc 100644 --- a/tensorflow_addons/version.py +++ b/tensorflow_addons/version.py @@ -19,7 +19,7 @@ # We follow Semantic Versioning (https://semver.org/) _MAJOR_VERSION = '0' -_MINOR_VERSION = '5' +_MINOR_VERSION = '6' _PATCH_VERSION = '0' # When building releases, we can update this value on the release branch to diff --git a/tools/ci_build/builds/release_linux.sh b/tools/ci_build/builds/release_linux.sh index e4414926d7..7f2ef964be 100755 --- a/tools/ci_build/builds/release_linux.sh +++ b/tools/ci_build/builds/release_linux.sh @@ -15,13 +15,16 @@ # ============================================================================== set -e -x -PYTHON_VERSIONS="python2.7 python3.5 python3.6" +PYTHON_VERSIONS="python2.7 python3.5 python3.6 python3.7" +ln -sf /usr/bin/python3.5 /usr/bin/python3 # Py36 has issues with add-apt curl -sSOL https://bootstrap.pypa.io/get-pip.py add-apt-repository -y ppa:deadsnakes/ppa +apt-get -y -qq update + for version in ${PYTHON_VERSIONS}; do export PYTHON_VERSION=${version} - apt-get -y -qq update && apt-get -y -qq install ${PYTHON_VERSION} + apt-get -y -qq install ${PYTHON_VERSION} ${PYTHON_VERSION} get-pip.py -q ${PYTHON_VERSION} -m pip --version @@ -31,10 +34,12 @@ for version in ${PYTHON_VERSIONS}; do # Build bazel build \ + -c opt \ --noshow_progress \ --noshow_loading_progress \ --verbose_failures \ --test_output=errors \ + --crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0:toolchain \ build_pip_pkg # Package Whl @@ -44,5 +49,8 @@ for version in ${PYTHON_VERSIONS}; do #bazel-bin/build_pip_pkg artifacts done +# Clean up +rm get-pip.py + # Verify Wheels ./tools/ci_build/builds/wheel_verify.sh \ No newline at end of file diff --git a/tools/ci_build/builds/release_macos.sh b/tools/ci_build/builds/release_macos.sh old mode 100644 new mode 100755 index c8065a6204..cc6806ed3f --- a/tools/ci_build/builds/release_macos.sh +++ b/tools/ci_build/builds/release_macos.sh @@ -15,7 +15,7 @@ # ============================================================================== set -e -x -PYTHON_VERSIONS="2.7.15 3.5.6 3.6.6" +PYTHON_VERSIONS="2.7.15 3.5.6 3.6.6 3.7.4" curl -sSOL https://bootstrap.pypa.io/get-pip.py # Install Bazel 0.24 @@ -24,6 +24,10 @@ chmod +x bazel-0.24.1-installer-darwin-x86_64.sh ./bazel-0.24.1-installer-darwin-x86_64.sh --user export PATH="$PATH:$HOME/bin" +# Install delocate +python3 -m pip install -q delocate + 
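For illustration only (not part of the patch): after the crf.py change above, `crf_log_likelihood` can be called without `transition_params`, in which case it builds a Glorot-uniform-initialized `[num_tags, num_tags]` variable itself. A rough equivalent with explicit construction, assuming toy inputs and the `tfa.text` API:

    import numpy as np
    import tensorflow as tf
    import tensorflow_addons as tfa

    num_tags = 5
    # Toy batch: one sequence of length 3 over num_tags tags.
    inputs = tf.constant(np.random.randn(1, 3, num_tags).astype("float32"))
    tag_indices = tf.constant([[0, 2, 1]], dtype=tf.int32)
    sequence_lengths = tf.constant([3], dtype=tf.int32)

    # Mirrors the internal default: a Glorot-uniform [num_tags, num_tags] matrix.
    # Note that tf.Variable's second positional argument is `trainable`, so the
    # variable name is passed as a keyword here.
    initializer = tf.keras.initializers.GlorotUniform()
    transition_params = tf.Variable(initializer([num_tags, num_tags]), name="transitions")

    log_likelihood, transition_params = tfa.text.crf_log_likelihood(
        inputs, tag_indices, sequence_lengths, transition_params)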
+brew update && brew upgrade pyenv eval "$(pyenv init -)" for version in ${PYTHON_VERSIONS}; do @@ -31,13 +35,14 @@ for version in ${PYTHON_VERSIONS}; do pyenv install -s $PYENV_VERSION python get-pip.py -q - python -m pip install -q delocate + python -m pip --version #Link TF dependency yes 'y' | sudo ./configure.sh --quiet # Build bazel build \ + -c opt \ --noshow_progress \ --noshow_loading_progress \ --verbose_failures \ @@ -51,5 +56,8 @@ for version in ${PYTHON_VERSIONS}; do #bazel-bin/build_pip_pkg artifacts done +# Clean up +rm get-pip.py + ## Verify Wheel ./tools/ci_build/builds/wheel_verify.sh \ No newline at end of file diff --git a/tools/ci_testing/run_tests.sh b/tools/ci_build/builds/tf_auditwheel_patch.sh similarity index 71% rename from tools/ci_testing/run_tests.sh rename to tools/ci_build/builds/tf_auditwheel_patch.sh index d0c3608cee..52f4c19500 100755 --- a/tools/ci_testing/run_tests.sh +++ b/tools/ci_build/builds/tf_auditwheel_patch.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/bash # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,15 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# # ============================================================================== -set -x - -## CPU Tests -/bin/bash ci_testing/addons_cpu.sh - -## GPU Tests -#/bin/bash ci_testing/addons_gpu.sh +set -e -exit $? \ No newline at end of file +TF_SHARED_LIBRARY_NAME=$(grep -r TF_SHARED_LIBRARY_NAME .bazelrc | awk -F= '{print$2}') +POLICY_JSON="/usr/local/lib/python3.6/dist-packages/auditwheel/policy/policy.json" +sed -i "s/libresolv.so.2\"/libresolv.so.2\", $TF_SHARED_LIBRARY_NAME/g" $POLICY_JSON diff --git a/tools/ci_build/builds/wheel_verify.sh b/tools/ci_build/builds/wheel_verify.sh index a23388b32d..e43042e281 100755 --- a/tools/ci_build/builds/wheel_verify.sh +++ b/tools/ci_build/builds/wheel_verify.sh @@ -16,12 +16,16 @@ set -e +if [[ $(uname) == "Darwin" ]]; then + CMD="delocate-wheel -w wheelhouse" +else + pip3.6 install -U auditwheel==2.0.0 + tools/ci_build/builds/tf_auditwheel_patch.sh + CMD="auditwheel repair --plat manylinux2010_x86_64" +fi + ls artifacts/* for f in artifacts/*.whl; do - if [[ $(uname) == "Darwin" ]]; then - delocate-wheel -w wheelhouse $f - else - auditwheel repair $f - fi + $CMD $f done ls wheelhouse/* \ No newline at end of file diff --git a/tools/ci_build/ci_sanity.sh b/tools/ci_build/ci_sanity.sh index a4d5d3df84..642c31ae75 100755 --- a/tools/ci_build/ci_sanity.sh +++ b/tools/ci_build/ci_sanity.sh @@ -227,13 +227,13 @@ do_bazel_nobuild() { } do_check_futures_test() { - cd "$ROOT_DIR/tools/test" - python check_futures_test.py + cd "$ROOT_DIR/tools/ci_build/verify" + python check_futures.py } do_check_file_name_test() { - cd "$ROOT_DIR/tools/test" - python file_name_test.py + cd "$ROOT_DIR/tools/ci_build/verify" + python check_file_name.py } do_check_code_format_test() { diff --git a/tools/test/file_name_test.py b/tools/ci_build/verify/check_file_name.py similarity index 95% rename from tools/test/file_name_test.py rename to tools/ci_build/verify/check_file_name.py index 23867570a9..2b290b0a2b 100644 --- a/tools/test/file_name_test.py +++ b/tools/ci_build/verify/check_file_name.py @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ import os -BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')) def main(): diff --git a/tools/test/check_futures_test.py b/tools/ci_build/verify/check_futures.py similarity index 99% rename from tools/test/check_futures_test.py rename to tools/ci_build/verify/check_futures.py index 05fd197a45..c17af2dc94 100644 --- a/tools/test/check_futures_test.py +++ b/tools/ci_build/verify/check_futures.py @@ -32,7 +32,7 @@ import six -BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')) FUTURES_PATTERN = re.compile(r'^from __future__ import (\w+)\s*$') FUTURES_PATTERN_2 = re.compile( r'^from __future__ import (\w+), (\w+), (\w+)\s*$') diff --git a/tools/ci_testing/addons_gpu.sh b/tools/ci_testing/addons_gpu.sh index ddaed75315..12b2cffdf4 100755 --- a/tools/ci_testing/addons_gpu.sh +++ b/tools/ci_testing/addons_gpu.sh @@ -43,6 +43,7 @@ yes 'y' | ./configure.sh bazel test -c opt -k \ --jobs=${N_JOBS} --test_timeout 300,450,1200,3600 \ --test_output=errors --local_test_jobs=8 \ + --crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0:toolchain \ //tensorflow_addons/... exit $? diff --git a/tools/ci_testing/install_py36.sh b/tools/ci_testing/install_py36.sh deleted file mode 100755 index 77d4ba30ec..0000000000 --- a/tools/ci_testing/install_py36.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# ============================================================================== -# Needed until docker image defaults to at least py35. 
- -set -e -x - -curl -sSOL https://bootstrap.pypa.io/get-pip.py -add-apt-repository -y ppa:deadsnakes/ppa - -apt-get -y -qq update && apt-get -y -qq install python3.6 - -python3.6 get-pip.py -q -python3.6 -m pip --version -rm get-pip.py - -ln -sfn /usr/bin/python3.6 /usr/bin/python3 -pip3 install scipy # Pre-installed in custom-op \ No newline at end of file diff --git a/tools/run_docker.sh b/tools/run_docker.sh index 4c91947250..e03be614b5 100755 --- a/tools/run_docker.sh +++ b/tools/run_docker.sh @@ -62,10 +62,10 @@ fi DOCKER_OPTS='' case ${DEVICE} in cpu) - DOCKER_IMAGE=tensorflow/tensorflow:custom-op + DOCKER_IMAGE=tensorflow/tensorflow:custom-op-ubuntu16 ;; gpu) - DOCKER_IMAGE=tensorflow/tensorflow:custom-op-gpu + DOCKER_IMAGE=tensorflow/tensorflow:custom-op-gpu-ubuntu16 DOCKER_OPTS="--runtime=nvidia ${DOCKER_OPTS}" ;; *) @@ -75,8 +75,8 @@ case ${DEVICE} in esac case ${PYTHON} in - py2) ENVIRONMENT_CMD="ln -sf /usr/bin/python2 /usr/bin/python";; - py3) ENVIRONMENT_CMD="tools/ci_testing/install_py36.sh && ln -sf /usr/bin/python3.6 /usr/bin/python";; + py2) ENVIRONMENT_CMD="ln -sf /usr/bin/python2 /usr/bin/python && python -m pip install -U pip";; + py3) ENVIRONMENT_CMD="ln -sf /usr/bin/python3.6 /usr/bin/python && python -m pip install -U pip";; *) echo "Invalid or missing python $OPTARG" exit 1 From 892d6022550a9cb6180ebb5cde89dd3a01a607e8 Mon Sep 17 00:00:00 2001 From: pkan2 Date: Sun, 8 Sep 2019 18:02:51 -0500 Subject: [PATCH 16/20] Revert "add CG optimizer" This reverts commit 953fa39bd19aabf04615c82c86b4032cb28a3826. --- CONTRIBUTING.md | 10 +- README.md | 9 +- .../crosstool_wrapper_driver_is_not_gcc.tpl | 0 build_deps/gpu/cuda_configure.bzl | 8 +- build_deps/requirements.txt | 3 +- build_deps/requirements_gpu.txt | 3 +- build_deps/tf_dependency/tf_configure.bzl | 2 +- .../gcc7_manylinux2010-nvcc-cuda10.0/BUILD | 115 -- .../cc_toolchain_config.bzl | 1493 ----------------- .../bin/crosstool_wrapper_driver_is_not_gcc | 268 --- build_pip_pkg.sh | 1 - configure.sh | 22 +- examples/image_ops.ipynb | 601 ------- examples/layers_normalizations.ipynb | 4 +- examples/layers_weightnormalization.ipynb | 4 +- examples/losses_triplet.ipynb | 6 +- examples/optimizers_lazyadam.ipynb | 4 +- examples/template.ipynb | 6 +- makefile | 1 + setup.py | 39 +- tensorflow_addons/__init__.py | 58 +- tensorflow_addons/activations/BUILD | 19 +- tensorflow_addons/activations/README.md | 14 +- tensorflow_addons/activations/__init__.py | 1 - tensorflow_addons/activations/gelu.py | 55 - tensorflow_addons/activations/gelu_test.py | 106 -- tensorflow_addons/activations/sparsemax.py | 2 +- .../activations/sparsemax_test.py | 14 - .../custom_ops/activations/BUILD | 47 - .../activations/cc/kernels/gelu_op.cc | 77 - .../activations/cc/kernels/gelu_op.h | 144 -- .../activations/cc/kernels/gelu_op_gpu.cu.cc | 36 - .../custom_ops/activations/cc/ops/gelu_op.cc | 37 - tensorflow_addons/custom_ops/image/BUILD | 29 +- .../image/cc/kernels/connected_components.cc | 138 -- .../image/cc/kernels/connected_components.h | 305 ---- .../euclidean_distance_transform_op.cc | 6 +- .../kernels/euclidean_distance_transform_op.h | 14 +- .../euclidean_distance_transform_op_gpu.cu.cc | 40 - .../custom_ops/image/cc/ops/image_ops.cc | 31 +- tensorflow_addons/custom_ops/layers/BUILD | 3 +- .../layers/cc/kernels/correlation_cost_op.cc | 2 +- tensorflow_addons/image/BUILD | 15 +- tensorflow_addons/image/README.md | 3 - tensorflow_addons/image/__init__.py | 1 - .../image/connected_components.py | 96 -- .../image/connected_components_test.py | 157 
-- .../image/interpolate_spline_test.py | 234 +-- .../image/sparse_image_warp_test.py | 150 +- tensorflow_addons/layers/BUILD | 14 - tensorflow_addons/layers/README.md | 2 - tensorflow_addons/layers/__init__.py | 3 +- tensorflow_addons/layers/gelu.py | 57 - tensorflow_addons/layers/gelu_test.py | 39 - tensorflow_addons/layers/optical_flow_test.py | 2 +- tensorflow_addons/layers/sparsemax_test.py | 6 +- tensorflow_addons/layers/wrappers.py | 123 +- tensorflow_addons/layers/wrappers_test.py | 114 +- tensorflow_addons/losses/README.md | 1 - tensorflow_addons/losses/__init__.py | 2 +- tensorflow_addons/losses/npairs.py | 110 -- tensorflow_addons/losses/npairs_test.py | 79 - tensorflow_addons/losses/sparsemax_loss.py | 2 +- .../losses/sparsemax_loss_test.py | 6 - tensorflow_addons/metrics/cohens_kappa.py | 18 +- tensorflow_addons/metrics/f1_test.py | 19 - tensorflow_addons/metrics/f_scores.py | 6 +- tensorflow_addons/metrics/fbeta_test.py | 19 - tensorflow_addons/optimizers/BUILD | 0 tensorflow_addons/optimizers/README.md | 0 tensorflow_addons/optimizers/__init__.py | 0 .../optimizers/lazy_adam_test.py | 2 +- tensorflow_addons/rnn/cell_test.py | 4 +- .../seq2seq/attention_wrapper.py | 6 +- .../seq2seq/attention_wrapper_test.py | 50 +- tensorflow_addons/text/crf.py | 5 +- tensorflow_addons/text/crf_test.py | 6 - tensorflow_addons/version.py | 2 +- tools/ci_build/builds/release_linux.sh | 12 +- tools/ci_build/builds/release_macos.sh | 12 +- tools/ci_build/builds/wheel_verify.sh | 14 +- tools/ci_build/ci_sanity.sh | 8 +- tools/ci_testing/addons_gpu.sh | 1 - tools/ci_testing/install_py36.sh | 31 + .../run_tests.sh} | 15 +- tools/run_docker.sh | 8 +- .../check_futures_test.py} | 2 +- .../file_name_test.py} | 4 +- 88 files changed, 564 insertions(+), 4673 deletions(-) mode change 100644 => 100755 build_deps/gpu/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl delete mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD delete mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl delete mode 100755 build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc delete mode 100644 examples/image_ops.ipynb delete mode 100644 tensorflow_addons/activations/gelu.py delete mode 100644 tensorflow_addons/activations/gelu_test.py delete mode 100644 tensorflow_addons/custom_ops/activations/BUILD delete mode 100644 tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc delete mode 100644 tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h delete mode 100644 tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc delete mode 100644 tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc delete mode 100644 tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc delete mode 100644 tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h delete mode 100644 tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc delete mode 100644 tensorflow_addons/image/connected_components.py delete mode 100644 tensorflow_addons/image/connected_components_test.py delete mode 100644 tensorflow_addons/layers/gelu.py delete mode 100644 tensorflow_addons/layers/gelu_test.py mode change 100755 => 100644 tensorflow_addons/optimizers/BUILD mode change 100755 => 100644 tensorflow_addons/optimizers/README.md mode change 100755 => 100644 tensorflow_addons/optimizers/__init__.py mode change 100755 => 100644 tools/ci_build/builds/release_macos.sh 
create mode 100755 tools/ci_testing/install_py36.sh rename tools/{ci_build/builds/tf_auditwheel_patch.sh => ci_testing/run_tests.sh} (71%) rename tools/{ci_build/verify/check_futures.py => test/check_futures_test.py} (99%) rename tools/{ci_build/verify/check_file_name.py => test/file_name_test.py} (95%) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3aae3caf1f..7bbfab997a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -52,8 +52,8 @@ Try these useful commands below: * Format code automatically: `bash tools/run_docker.sh -c 'make code-format'` * Run sanity check: `bash tools/run_docker.sh -c 'make sanity-check'` * Run CPU unit tests: `bash tools/run_docker.sh -c 'make unit-test'` -* Run GPU unit tests: `bash tools/run_docker.sh -d gpu -c 'make gpu-unit-test'` -* All of the above: `bash tools/run_docker.sh -d gpu -c 'make'` +* Run GPU unit tests: `bash tools/run_docker.sh -c 'make gpu-unit-test'` +* All of the above: `bash tools/run_docker.sh -c 'make'` ## Coding style @@ -76,7 +76,8 @@ bash tools/run_docker.sh -c 'make unit-test' or run manually: ```bash -docker run --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:custom-op-ubuntu16 /bin/bash +docker run --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:nightly-custom-op /bin/bash + ./configure.sh # Links project with TensorFlow dependency bazel test -c opt -k \ @@ -93,12 +94,11 @@ bash tools/run_docker.sh -d gpu -c 'make gpu-unit-test' or run manually: ```bash -docker run --runtime=nvidia --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:custom-op-gpu-ubuntu16 /bin/bash +docker run --runtime=nvidia --rm -it -v ${PWD}:/addons -w /addons tensorflow/tensorflow:custom-op-gpu /bin/bash ./configure.sh # Links project with TensorFlow dependency bazel test -c opt -k \ --test_timeout 300,450,1200,3600 \ ---crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0:toolchain \ --test_output=all \ --jobs=1 \ //tensorflow_addons/... diff --git a/README.md b/README.md index ecabd4ae65..1b53efd48d 100644 --- a/README.md +++ b/README.md @@ -47,6 +47,7 @@ To install the latest version, run the following: pip install tensorflow-addons ``` +**Note:** You will also need [`tensorflow==2.0.0-beta1`](https://www.tensorflow.org/beta) installed. To use addons: @@ -72,9 +73,6 @@ https://bazel.build/) build system. git clone https://github.com/tensorflow/addons.git cd addons -# If building GPU Ops (Requires CUDA 10.0 and CuDNN 7) -export TF_NEED_CUDA=1 - # This script links project with TensorFlow dependency ./configure.sh @@ -95,11 +93,6 @@ User experience and project maintainability are core concepts in TF-Addons. In order to achieve these we require that our additions conform to established API patterns seen in core TensorFlow. -#### GPU/CPU Custom-Ops -A major benefit of TensorFlow Addons is that there are precompiled ops. Should -a CUDA 10 installation not be found then the op will automatically fall back to -a CPU implementation. 
- #### Proxy Maintainership Addons has been designed to compartmentalize subpackages and submodules so that they can be maintained by users who have expertise and a vested interest diff --git a/build_deps/gpu/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl b/build_deps/gpu/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl old mode 100644 new mode 100755 diff --git a/build_deps/gpu/cuda_configure.bzl b/build_deps/gpu/cuda_configure.bzl index f996eeb08a..475a1281da 100644 --- a/build_deps/gpu/cuda_configure.bzl +++ b/build_deps/gpu/cuda_configure.bzl @@ -174,7 +174,7 @@ def _get_win_cuda_defines(repository_ctx): # If we are not on Windows, return empty vaules for Windows specific fields. # This ensures the CROSSTOOL file parser is happy. if not _is_windows(repository_ctx): - return dict({ + return { "%{msvc_env_tmp}": "", "%{msvc_env_path}": "", "%{msvc_env_include}": "", @@ -184,7 +184,7 @@ def _get_win_cuda_defines(repository_ctx): "%{msvc_link_path}": "", "%{msvc_lib_path}": "", "%{cxx_builtin_include_directory}": "", - }) + } vc_path = find_vc_path(repository_ctx) if not vc_path: @@ -957,8 +957,6 @@ def _get_cuda_config(repository_ctx): ) def _tpl(repository_ctx, tpl, substitutions = {}, out = None): - if substitutions == None: - substitutions = {} if not out: out = tpl.replace(":", "/") repository_ctx.template( @@ -1303,7 +1301,7 @@ def _create_local_cuda_repository(repository_ctx): _tpl( repository_ctx, "crosstool:CROSSTOOL", - cuda_defines.update(_get_win_cuda_defines(repository_ctx)), + cuda_defines + _get_win_cuda_defines(repository_ctx), out = "crosstool/CROSSTOOL", ) diff --git a/build_deps/requirements.txt b/build_deps/requirements.txt index fda98be524..332a109199 100644 --- a/build_deps/requirements.txt +++ b/build_deps/requirements.txt @@ -1,2 +1 @@ -# TensorFlow greater than this date is manylinux2010 compliant -tf-nightly-2.0-preview>=2.0.0.dev20190802 +tf-nightly-2.0-preview==2.0.0.dev20190731 diff --git a/build_deps/requirements_gpu.txt b/build_deps/requirements_gpu.txt index e0f02a4f63..24d74c5f53 100644 --- a/build_deps/requirements_gpu.txt +++ b/build_deps/requirements_gpu.txt @@ -1,2 +1 @@ -# TensorFlow greater than this date is manylinux2010 compliant -tf-nightly-gpu-2.0-preview>=2.0.0.dev20190802 +tf-nightly-gpu-2.0-preview==2.0.0.dev20190731 diff --git a/build_deps/tf_dependency/tf_configure.bzl b/build_deps/tf_dependency/tf_configure.bzl index 485773b938..fc187ee552 100644 --- a/build_deps/tf_dependency/tf_configure.bzl +++ b/build_deps/tf_dependency/tf_configure.bzl @@ -168,7 +168,7 @@ def _symlink_genrule_for_dir( # Copy the headers to create a sandboxable setup. cmd = "cp -f" - command.append(cmd + ' "%s" "%s" | true' % (src_files[i], dest)) + command.append(cmd + ' "%s" "%s"' % (src_files[i], dest)) outs.append(' "' + dest_dir + dest_files[i] + '",') genrule = _genrule( genrule_name, diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD deleted file mode 100755 index 234bd0ed92..0000000000 --- a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/BUILD +++ /dev/null @@ -1,115 +0,0 @@ -# This file is expanded from a template by cuda_configure.bzl -# Update cuda_configure.bzl#verify_build_defines when adding new variables. 
- -load(":cc_toolchain_config.bzl", "cc_toolchain_config") - -licenses(["restricted"]) - -package(default_visibility = ["//visibility:public"]) - -toolchain( - name = "toolchain-linux-x86_64", - exec_compatible_with = [ - "@bazel_tools//platforms:linux", - "@bazel_tools//platforms:x86_64", - ], - target_compatible_with = [ - "@bazel_tools//platforms:linux", - "@bazel_tools//platforms:x86_64", - ], - toolchain = ":cc-compiler-local", - toolchain_type = "@bazel_tools//tools/cpp:toolchain_type", -) - -cc_toolchain_suite( - name = "toolchain", - toolchains = { - "local|compiler": ":cc-compiler-local", - "darwin|compiler": ":cc-compiler-darwin", - "k8": ":cc-compiler-local", - "darwin": ":cc-compiler-darwin", - }, -) - -cc_toolchain( - name = "cc-compiler-local", - all_files = ":crosstool_wrapper_driver_is_not_gcc", - compiler_files = ":empty", - dwp_files = ":empty", - linker_files = ":crosstool_wrapper_driver_is_not_gcc", - objcopy_files = ":empty", - strip_files = ":empty", - # To support linker flags that need to go to the start of command line - # we need the toolchain to support parameter files. Parameter files are - # last on the command line and contain all shared libraries to link, so all - # regular options will be left of them. - supports_param_files = 1, - toolchain_config = ":cc-compiler-local-config", - toolchain_identifier = "local_linux", -) - -cc_toolchain_config( - name = "cc-compiler-local-config", - builtin_include_directories = [ - "/dt7/usr/include/c++/7", - "/dt7/usr/include/c++/7/x86_64-pc-linux-gnu", - "/dt7/usr/include/c++/7/backward", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include-fixed", - "/dt7/usr/include", - "/usr/local/cuda-10.0/targets/x86_64-linux/include", - "/usr/local/cuda-10.0/include", - "/usr/local/cuda-10.0/extras/CUPTI/include", - "/usr/include", - ], - cpu = "local", - extra_no_canonical_prefixes_flags = ["-fno-canonical-system-headers"], - host_compiler_path = "clang/bin/crosstool_wrapper_driver_is_not_gcc", - host_compiler_prefix = "/usr/bin", - host_compiler_warnings = [], - host_unfiltered_compile_flags = [], - linker_bin_path = "/usr/bin", -) - -cc_toolchain( - name = "cc-compiler-darwin", - all_files = ":crosstool_wrapper_driver_is_not_gcc", - compiler_files = ":empty", - dwp_files = ":empty", - linker_files = ":crosstool_wrapper_driver_is_not_gcc", - objcopy_files = ":empty", - strip_files = ":empty", - supports_param_files = 0, - toolchain_config = ":cc-compiler-local-darwin", - toolchain_identifier = "local_darwin", -) - -cc_toolchain_config( - name = "cc-compiler-local-darwin", - builtin_include_directories = [ - "/dt7/usr/include/c++/7", - "/dt7/usr/include/c++/7/x86_64-pc-linux-gnu", - "/dt7/usr/include/c++/7/backward", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include", - "/dt7/usr/lib/gcc/x86_64-pc-linux-gnu/7/include-fixed", - "/dt7/usr/include", - "/usr/include", - ], - cpu = "darwin", - extra_no_canonical_prefixes_flags = ["-fno-canonical-system-headers"], - host_compiler_path = "clang/bin/crosstool_wrapper_driver_is_not_gcc", - host_compiler_prefix = "/usr/bin", - host_compiler_warnings = [], - host_unfiltered_compile_flags = [], - linker_bin_path = "/usr/bin", -) - -filegroup( - name = "empty", - srcs = [], -) - -filegroup( - name = "crosstool_wrapper_driver_is_not_gcc", - srcs = ["clang/bin/crosstool_wrapper_driver_is_not_gcc"], -) diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl 
b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl deleted file mode 100755 index ba002b4543..0000000000 --- a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/cc_toolchain_config.bzl +++ /dev/null @@ -1,1493 +0,0 @@ -"""cc_toolchain_config rule for configuring CUDA toolchains on Linux, Mac, and Windows.""" - -load( - "@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl", - "action_config", - "env_entry", - "env_set", - "feature", - "feature_set", - "flag_group", - "flag_set", - "tool", - "tool_path", - "variable_with_value", -) -load( - "@bazel_tools//tools/build_defs/cc:action_names.bzl", - "ASSEMBLE_ACTION_NAME", - "CC_FLAGS_MAKE_VARIABLE_ACTION_NAME", - "CLIF_MATCH_ACTION_NAME", - "CPP_COMPILE_ACTION_NAME", - "CPP_HEADER_PARSING_ACTION_NAME", - "CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME", - "CPP_LINK_EXECUTABLE_ACTION_NAME", - "CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME", - "CPP_LINK_STATIC_LIBRARY_ACTION_NAME", - "CPP_MODULE_CODEGEN_ACTION_NAME", - "CPP_MODULE_COMPILE_ACTION_NAME", - "C_COMPILE_ACTION_NAME", - "LINKSTAMP_COMPILE_ACTION_NAME", - "LTO_BACKEND_ACTION_NAME", - "LTO_INDEXING_ACTION_NAME", - "OBJCPP_COMPILE_ACTION_NAME", - "OBJCPP_EXECUTABLE_ACTION_NAME", - "OBJC_ARCHIVE_ACTION_NAME", - "OBJC_COMPILE_ACTION_NAME", - "OBJC_EXECUTABLE_ACTION_NAME", - "OBJC_FULLY_LINK_ACTION_NAME", - "PREPROCESS_ASSEMBLE_ACTION_NAME", - "STRIP_ACTION_NAME", -) - -ACTION_NAMES = struct( - assemble = ASSEMBLE_ACTION_NAME, - c_compile = C_COMPILE_ACTION_NAME, - cc_flags_make_variable = CC_FLAGS_MAKE_VARIABLE_ACTION_NAME, - clif_match = CLIF_MATCH_ACTION_NAME, - cpp_compile = CPP_COMPILE_ACTION_NAME, - cpp_header_parsing = CPP_HEADER_PARSING_ACTION_NAME, - cpp_link_dynamic_library = CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME, - cpp_link_executable = CPP_LINK_EXECUTABLE_ACTION_NAME, - cpp_link_nodeps_dynamic_library = CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME, - cpp_link_static_library = CPP_LINK_STATIC_LIBRARY_ACTION_NAME, - cpp_module_codegen = CPP_MODULE_CODEGEN_ACTION_NAME, - cpp_module_compile = CPP_MODULE_COMPILE_ACTION_NAME, - ld_embed_data = "ld_embed_data", - linkstamp_compile = LINKSTAMP_COMPILE_ACTION_NAME, - lto_backend = LTO_BACKEND_ACTION_NAME, - lto_indexing = LTO_INDEXING_ACTION_NAME, - objc_archive = OBJC_ARCHIVE_ACTION_NAME, - objc_compile = OBJC_COMPILE_ACTION_NAME, - objc_executable = OBJC_EXECUTABLE_ACTION_NAME, - objc_fully_link = OBJC_FULLY_LINK_ACTION_NAME, - objcopy_embed_data = "objcopy_embed_data", - objcpp_compile = OBJCPP_COMPILE_ACTION_NAME, - objcpp_executable = OBJCPP_EXECUTABLE_ACTION_NAME, - preprocess_assemble = PREPROCESS_ASSEMBLE_ACTION_NAME, - strip = STRIP_ACTION_NAME, -) - -def _impl(ctx): - if (ctx.attr.cpu == "darwin"): - toolchain_identifier = "local_darwin" - elif (ctx.attr.cpu == "local"): - toolchain_identifier = "local_linux" - elif (ctx.attr.cpu == "x64_windows"): - toolchain_identifier = "local_windows" - else: - fail("Unreachable") - - host_system_name = "local" - - target_system_name = "local" - - if (ctx.attr.cpu == "darwin"): - target_cpu = "darwin" - elif (ctx.attr.cpu == "local"): - target_cpu = "local" - elif (ctx.attr.cpu == "x64_windows"): - target_cpu = "x64_windows" - else: - fail("Unreachable") - - if (ctx.attr.cpu == "local"): - target_libc = "local" - elif (ctx.attr.cpu == "darwin"): - target_libc = "macosx" - elif (ctx.attr.cpu == "x64_windows"): - target_libc = "msvcrt" - else: - fail("Unreachable") - - if (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - compiler = "compiler" - elif (ctx.attr.cpu 
== "x64_windows"): - compiler = "msvc-cl" - else: - fail("Unreachable") - - abi_version = "local" - - abi_libc_version = "local" - - cc_target_os = None - - builtin_sysroot = None - - all_link_actions = [ - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ] - - cpp_link_dynamic_library_action = action_config( - action_name = ACTION_NAMES.cpp_link_dynamic_library, - implies = [ - "nologo", - "shared_flag", - "linkstamps", - "output_execpath_flags", - "input_param_flags", - "user_link_flags", - "linker_subsystem_flag", - "linker_param_file", - "msvc_env", - "no_stripping", - "has_configured_linker_path", - "def_file", - ], - tools = [tool(path = ctx.attr.msvc_link_path)], - ) - - cpp_link_nodeps_dynamic_library_action = action_config( - action_name = ACTION_NAMES.cpp_link_nodeps_dynamic_library, - implies = [ - "nologo", - "shared_flag", - "linkstamps", - "output_execpath_flags", - "input_param_flags", - "user_link_flags", - "linker_subsystem_flag", - "linker_param_file", - "msvc_env", - "no_stripping", - "has_configured_linker_path", - "def_file", - ], - tools = [tool(path = ctx.attr.msvc_link_path)], - ) - - cpp_link_static_library_action = action_config( - action_name = ACTION_NAMES.cpp_link_static_library, - implies = [ - "nologo", - "archiver_flags", - "input_param_flags", - "linker_param_file", - "msvc_env", - ], - tools = [tool(path = ctx.attr.msvc_lib_path)], - ) - - assemble_action = action_config( - action_name = ACTION_NAMES.assemble, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "sysroot", - ], - tools = [tool(path = ctx.attr.msvc_ml_path)], - ) - - preprocess_assemble_action = action_config( - action_name = ACTION_NAMES.preprocess_assemble, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "sysroot", - ], - tools = [tool(path = ctx.attr.msvc_ml_path)], - ) - - c_compile_action = action_config( - action_name = ACTION_NAMES.c_compile, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "parse_showincludes", - "user_compile_flags", - "sysroot", - "unfiltered_compile_flags", - ], - tools = [tool(path = ctx.attr.msvc_cl_path)], - ) - - cpp_compile_action = action_config( - action_name = ACTION_NAMES.cpp_compile, - implies = [ - "compiler_input_flags", - "compiler_output_flags", - "nologo", - "msvc_env", - "parse_showincludes", - "user_compile_flags", - "sysroot", - "unfiltered_compile_flags", - ], - tools = [tool(path = ctx.attr.msvc_cl_path)], - ) - - cpp_link_executable_action = action_config( - action_name = ACTION_NAMES.cpp_link_executable, - implies = [ - "nologo", - "linkstamps", - "output_execpath_flags", - "input_param_flags", - "user_link_flags", - "linker_subsystem_flag", - "linker_param_file", - "msvc_env", - "no_stripping", - ], - tools = [tool(path = ctx.attr.msvc_link_path)], - ) - - if (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - action_configs = [] - elif (ctx.attr.cpu == "x64_windows"): - action_configs = [ - assemble_action, - preprocess_assemble_action, - c_compile_action, - cpp_compile_action, - cpp_link_executable_action, - cpp_link_dynamic_library_action, - cpp_link_nodeps_dynamic_library_action, - cpp_link_static_library_action, - ] - else: - fail("Unreachable") - - no_windows_export_all_symbols_feature = feature(name = "no_windows_export_all_symbols") - - pic_feature = feature( - name = "pic", - enabled = True, - flag_sets = [ - 
flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group(flags = ["-fPIC"], expand_if_available = "pic"), - flag_group( - flags = ["-fPIE"], - expand_if_not_available = "pic", - ), - ], - ), - ], - ) - - preprocessor_defines_feature = feature( - name = "preprocessor_defines", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ], - flag_groups = [ - flag_group( - flags = ["/D%{preprocessor_defines}"], - iterate_over = "preprocessor_defines", - ), - ], - ), - ], - ) - - generate_pdb_file_feature = feature( - name = "generate_pdb_file", - requires = [ - feature_set(features = ["dbg"]), - feature_set(features = ["fastbuild"]), - ], - ) - - linkstamps_feature = feature( - name = "linkstamps", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["%{linkstamp_paths}"], - iterate_over = "linkstamp_paths", - expand_if_available = "linkstamp_paths", - ), - ], - ), - ], - ) - - unfiltered_compile_flags_feature = feature( - name = "unfiltered_compile_flags", - flag_sets = ([ - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flags = ctx.attr.host_unfiltered_compile_flags, - ), - ], - ), - ] if ctx.attr.host_unfiltered_compile_flags else []), - ) - - determinism_feature = feature( - name = "determinism", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = [ - "-Wno-builtin-macro-redefined", - "-D__DATE__=\"redacted\"", - "-D__TIMESTAMP__=\"redacted\"", - "-D__TIME__=\"redacted\"", - ], - ), - ], - ), - ], - ) - - nologo_feature = feature( - name = "nologo", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_static_library, - ], - flag_groups = [flag_group(flags = ["/nologo"])], - ), - ], - ) - - supports_pic_feature = feature(name = "supports_pic", enabled = True) - - output_execpath_flags_feature = feature( - name = "output_execpath_flags", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["/OUT:%{output_execpath}"], - expand_if_available = "output_execpath", - ), - ], - ), - ], - ) - - default_link_flags_feature = feature( - name = "default_link_flags", - enabled = True, - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/MACHINE:X64"])], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - hardening_feature = feature( - name = "hardening", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = [ - "-U_FORTIFY_SOURCE", - "-D_FORTIFY_SOURCE=1", - "-fstack-protector", - ], - ), - ], - ), - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - 
flag_groups = [flag_group(flags = ["-Wl,-z,relro,-z,now"])], - ), - flag_set( - actions = [ACTION_NAMES.cpp_link_executable], - flag_groups = [flag_group(flags = ["-pie", "-Wl,-z,relro,-z,now"])], - ), - ], - ) - elif (ctx.attr.cpu == "darwin"): - hardening_feature = feature( - name = "hardening", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = [ - "-U_FORTIFY_SOURCE", - "-D_FORTIFY_SOURCE=1", - "-fstack-protector", - ], - ), - ], - ), - flag_set( - actions = [ACTION_NAMES.cpp_link_executable], - flag_groups = [flag_group(flags = ["-pie"])], - ), - ], - ) - else: - hardening_feature = None - - supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True) - - targets_windows_feature = feature( - name = "targets_windows", - enabled = True, - implies = ["copy_dynamic_libraries_to_binary"], - ) - - msvc_env_feature = feature( - name = "msvc_env", - env_sets = [ - env_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_static_library, - ], - env_entries = [ - env_entry(key = "PATH", value = ctx.attr.msvc_env_path), - env_entry( - key = "INCLUDE", - value = ctx.attr.msvc_env_include, - ), - env_entry(key = "LIB", value = ctx.attr.msvc_env_lib), - env_entry(key = "TMP", value = ctx.attr.msvc_env_tmp), - env_entry(key = "TEMP", value = ctx.attr.msvc_env_tmp), - ], - ), - ], - ) - - linker_subsystem_flag_feature = feature( - name = "linker_subsystem_flag", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/SUBSYSTEM:CONSOLE"])], - ), - ], - ) - - dynamic_link_msvcrt_no_debug_feature = feature( - name = "dynamic_link_msvcrt_no_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MD"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrt.lib"])], - ), - ], - requires = [ - feature_set(features = ["fastbuild"]), - feature_set(features = ["opt"]), - ], - ) - - warnings_feature = feature( - name = "warnings", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = ["-Wall"] + ctx.attr.host_compiler_warnings, - ), - ], - ), - ], - ) - - dynamic_link_msvcrt_debug_feature = feature( - name = "dynamic_link_msvcrt_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MDd"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrtd.lib"])], - ), - ], - requires = [feature_set(features = ["dbg"])], - ) - - compiler_output_flags_feature = feature( - name = "compiler_output_flags", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.assemble], - flag_groups = [ - flag_group( - flag_groups = [ - flag_group( - flags = ["/Fo%{output_file}", "/Zi"], - expand_if_not_available = "output_preprocess_file", - ), - ], - expand_if_available = "output_file", - expand_if_not_available = "output_assembly_file", - ), - ], - ), - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - 
ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flag_groups = [ - flag_group( - flags = ["/Fo%{output_file}"], - expand_if_not_available = "output_preprocess_file", - ), - ], - expand_if_available = "output_file", - expand_if_not_available = "output_assembly_file", - ), - flag_group( - flag_groups = [ - flag_group( - flags = ["/Fa%{output_file}"], - expand_if_available = "output_assembly_file", - ), - ], - expand_if_available = "output_file", - ), - flag_group( - flag_groups = [ - flag_group( - flags = ["/P", "/Fi%{output_file}"], - expand_if_available = "output_preprocess_file", - ), - ], - expand_if_available = "output_file", - ), - ], - ), - ], - ) - - default_compile_flags_feature = feature( - name = "default_compile_flags", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.linkstamp_compile, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.lto_backend, - ACTION_NAMES.clif_match, - ], - flag_groups = [ - flag_group( - flags = [ - "/DCOMPILER_MSVC", - "/DNOMINMAX", - "/D_WIN32_WINNT=0x0600", - "/D_CRT_SECURE_NO_DEPRECATE", - "/D_CRT_SECURE_NO_WARNINGS", - "/D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS", - "/bigobj", - "/Zm500", - "/J", - "/Gy", - "/GF", - "/EHsc", - "/wd4351", - "/wd4291", - "/wd4250", - "/wd4996", - ], - ), - ], - ), - ], - ) - - static_link_msvcrt_debug_feature = feature( - name = "static_link_msvcrt_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MTd"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmtd.lib"])], - ), - ], - requires = [feature_set(features = ["dbg"])], - ) - - static_link_msvcrt_feature = feature(name = "static_link_msvcrt") - - if (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - dbg_feature = feature( - name = "dbg", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-g"])], - ), - ], - implies = ["common"], - ) - elif (ctx.attr.cpu == "x64_windows"): - dbg_feature = feature( - name = "dbg", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/Od", "/Z7", "/DDEBUG"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEBUG:FULL", "/INCREMENTAL:NO"])], - ), - ], - implies = ["generate_pdb_file"], - ) - else: - dbg_feature = None - - undefined_dynamic_feature = feature( - name = "undefined-dynamic", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_executable, - ], - flag_groups = [flag_group(flags = ["-undefined", "dynamic_lookup"])], - ), - ], - ) - - parse_showincludes_feature = feature( - name = "parse_showincludes", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_header_parsing, - ], - flag_groups = [flag_group(flags = ["/showIncludes"])], - ), - ], - ) - - linker_param_file_feature = feature( - name = 
"linker_param_file", - flag_sets = [ - flag_set( - actions = all_link_actions + - [ACTION_NAMES.cpp_link_static_library], - flag_groups = [ - flag_group( - flags = ["@%{linker_param_file}"], - expand_if_available = "linker_param_file", - ), - ], - ), - ], - ) - - static_link_msvcrt_no_debug_feature = feature( - name = "static_link_msvcrt_no_debug", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/MT"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmt.lib"])], - ), - ], - requires = [ - feature_set(features = ["fastbuild"]), - feature_set(features = ["opt"]), - ], - ) - - supports_interface_shared_libraries_feature = feature( - name = "supports_interface_shared_libraries", - enabled = True, - ) - - disable_assertions_feature = feature( - name = "disable-assertions", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-DNDEBUG"])], - ), - ], - ) - - if (ctx.attr.cpu == "x64_windows"): - fastbuild_feature = feature( - name = "fastbuild", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/Od", "/Z7", "/DDEBUG"])], - ), - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group(flags = ["/DEBUG:FASTLINK", "/INCREMENTAL:NO"]), - ], - ), - ], - implies = ["generate_pdb_file"], - ) - elif (ctx.attr.cpu == "darwin" or - ctx.attr.cpu == "local"): - fastbuild_feature = feature(name = "fastbuild", implies = ["common"]) - else: - fastbuild_feature = None - - user_compile_flags_feature = feature( - name = "user_compile_flags", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flags = ["%{user_compile_flags}"], - iterate_over = "user_compile_flags", - expand_if_available = "user_compile_flags", - ), - ], - ), - ], - ) - - compiler_input_flags_feature = feature( - name = "compiler_input_flags", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ], - flag_groups = [ - flag_group( - flags = ["/c", "%{source_file}"], - expand_if_available = "source_file", - ), - ], - ), - ], - ) - - no_legacy_features_feature = feature(name = "no_legacy_features") - - archiver_flags_feature = feature( - name = "archiver_flags", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.cpp_link_static_library], - flag_groups = [ - flag_group( - flags = ["/OUT:%{output_execpath}"], - expand_if_available = "output_execpath", - ), - ], - ), - ], - ) - - redirector_feature = feature( - name = "redirector", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ], - flag_groups = [ - flag_group( - flags = [ - "-B", - "external/local_config_cuda/crosstool/windows/msvc_wrapper_for_nvcc.py", - ], - ), - ], - ), - ], - ) - - linker_bin_path_feature = feature( - name = 
"linker-bin-path", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["-B" + ctx.attr.linker_bin_path])], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - opt_feature = feature( - name = "opt", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = ["-g0", "-O2", "-ffunction-sections", "-fdata-sections"], - ), - ], - ), - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_executable, - ], - flag_groups = [flag_group(flags = ["-Wl,--gc-sections"])], - ), - ], - implies = ["common", "disable-assertions"], - ) - elif (ctx.attr.cpu == "darwin"): - opt_feature = feature( - name = "opt", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [ - flag_group( - flags = ["-g0", "-O2", "-ffunction-sections", "-fdata-sections"], - ), - ], - ), - ], - implies = ["common", "disable-assertions"], - ) - elif (ctx.attr.cpu == "x64_windows"): - opt_feature = feature( - name = "opt", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["/O2", "/DNDEBUG"])], - ), - ], - ) - else: - opt_feature = None - - include_paths_feature = feature( - name = "include_paths", - enabled = True, - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ], - flag_groups = [ - flag_group( - flags = ["/I%{quote_include_paths}"], - iterate_over = "quote_include_paths", - ), - flag_group( - flags = ["/I%{include_paths}"], - iterate_over = "include_paths", - ), - flag_group( - flags = ["/I%{system_include_paths}"], - iterate_over = "system_include_paths", - ), - ], - ), - ], - ) - - shared_flag_feature = feature( - name = "shared_flag", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [flag_group(flags = ["/DLL"])], - ), - ], - ) - - windows_export_all_symbols_feature = feature(name = "windows_export_all_symbols") - - frame_pointer_feature = feature( - name = "frame-pointer", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-fno-omit-frame-pointer"])], - ), - ], - ) - - build_id_feature = feature( - name = "build-id", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["-Wl,--build-id=md5", "-Wl,--hash-style=gnu"], - ), - ], - ), - ], - ) - - sysroot_feature = feature( - name = "sysroot", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.assemble, - ACTION_NAMES.preprocess_assemble, - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_header_parsing, - ACTION_NAMES.cpp_module_compile, - ACTION_NAMES.cpp_module_codegen, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [ - flag_group( - flags = ["--sysroot=%{sysroot}"], - iterate_over = "sysroot", - expand_if_available = "sysroot", - ), - ], - ), - ], - ) - - def_file_feature = feature( - name = "def_file", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["/DEF:%{def_file_path}", 
"/ignore:4070"], - expand_if_available = "def_file_path", - ), - ], - ), - ], - ) - - if (ctx.attr.cpu == "darwin"): - stdlib_feature = feature( - name = "stdlib", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["-lc++"])], - ), - ], - ) - elif (ctx.attr.cpu == "local"): - stdlib_feature = feature( - name = "stdlib", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [flag_group(flags = ["-lstdc++"])], - ), - ], - ) - else: - stdlib_feature = None - - no_stripping_feature = feature(name = "no_stripping") - - alwayslink_feature = feature( - name = "alwayslink", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ACTION_NAMES.cpp_link_executable, - ], - flag_groups = [flag_group(flags = ["-Wl,-no-as-needed"])], - ), - ], - ) - - input_param_flags_feature = feature( - name = "input_param_flags", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [ - flag_group( - flags = ["/IMPLIB:%{interface_library_output_path}"], - expand_if_available = "interface_library_output_path", - ), - ], - ), - flag_set( - actions = all_link_actions + - [ACTION_NAMES.cpp_link_static_library], - flag_groups = [ - flag_group( - iterate_over = "libraries_to_link", - flag_groups = [ - flag_group( - iterate_over = "libraries_to_link.object_files", - flag_groups = [flag_group(flags = ["%{libraries_to_link.object_files}"])], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "object_file_group", - ), - ), - flag_group( - flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "object_file", - ), - ), - flag_group( - flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "interface_library", - ), - ), - flag_group( - flag_groups = [ - flag_group( - flags = ["%{libraries_to_link.name}"], - expand_if_false = "libraries_to_link.is_whole_archive", - ), - flag_group( - flags = ["/WHOLEARCHIVE:%{libraries_to_link.name}"], - expand_if_true = "libraries_to_link.is_whole_archive", - ), - ], - expand_if_equal = variable_with_value( - name = "libraries_to_link.type", - value = "static_library", - ), - ), - ], - expand_if_available = "libraries_to_link", - ), - ], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - no_canonical_prefixes_feature = feature( - name = "no-canonical-prefixes", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [ - flag_group( - flags = [ - "-no-canonical-prefixes", - ] + ctx.attr.extra_no_canonical_prefixes_flags, - ), - ], - ), - ], - ) - elif (ctx.attr.cpu == "darwin"): - no_canonical_prefixes_feature = feature( - name = "no-canonical-prefixes", - flag_sets = [ - flag_set( - actions = [ - ACTION_NAMES.c_compile, - ACTION_NAMES.cpp_compile, - ACTION_NAMES.cpp_link_executable, - ACTION_NAMES.cpp_link_dynamic_library, - ACTION_NAMES.cpp_link_nodeps_dynamic_library, - ], - flag_groups = [flag_group(flags = ["-no-canonical-prefixes"])], - ), - ], - ) - else: - no_canonical_prefixes_feature = None - - has_configured_linker_path_feature = feature(name = 
"has_configured_linker_path") - - copy_dynamic_libraries_to_binary_feature = feature(name = "copy_dynamic_libraries_to_binary") - - user_link_flags_feature = feature( - name = "user_link_flags", - flag_sets = [ - flag_set( - actions = all_link_actions, - flag_groups = [ - flag_group( - flags = ["%{user_link_flags}"], - iterate_over = "user_link_flags", - expand_if_available = "user_link_flags", - ), - ], - ), - ], - ) - - cpp11_feature = feature( - name = "c++11", - flag_sets = [ - flag_set( - actions = [ACTION_NAMES.cpp_compile], - flag_groups = [flag_group(flags = ["-std=c++11"])], - ), - ], - ) - - if (ctx.attr.cpu == "local"): - common_feature = feature( - name = "common", - implies = [ - "stdlib", - "c++11", - "determinism", - "alwayslink", - "hardening", - "warnings", - "frame-pointer", - "build-id", - "no-canonical-prefixes", - "linker-bin-path", - ], - ) - elif (ctx.attr.cpu == "darwin"): - common_feature = feature( - name = "common", - implies = [ - "stdlib", - "c++11", - "determinism", - "hardening", - "warnings", - "frame-pointer", - "no-canonical-prefixes", - "linker-bin-path", - "undefined-dynamic", - ], - ) - else: - common_feature = None - - if (ctx.attr.cpu == "local"): - features = [ - cpp11_feature, - stdlib_feature, - determinism_feature, - alwayslink_feature, - pic_feature, - hardening_feature, - warnings_feature, - frame_pointer_feature, - build_id_feature, - no_canonical_prefixes_feature, - disable_assertions_feature, - linker_bin_path_feature, - common_feature, - opt_feature, - fastbuild_feature, - dbg_feature, - supports_dynamic_linker_feature, - supports_pic_feature, - ] - elif (ctx.attr.cpu == "darwin"): - features = [ - cpp11_feature, - stdlib_feature, - determinism_feature, - pic_feature, - hardening_feature, - warnings_feature, - frame_pointer_feature, - no_canonical_prefixes_feature, - disable_assertions_feature, - linker_bin_path_feature, - undefined_dynamic_feature, - common_feature, - opt_feature, - fastbuild_feature, - dbg_feature, - supports_dynamic_linker_feature, - supports_pic_feature, - ] - elif (ctx.attr.cpu == "x64_windows"): - features = [ - no_legacy_features_feature, - redirector_feature, - nologo_feature, - has_configured_linker_path_feature, - no_stripping_feature, - targets_windows_feature, - copy_dynamic_libraries_to_binary_feature, - default_compile_flags_feature, - msvc_env_feature, - include_paths_feature, - preprocessor_defines_feature, - parse_showincludes_feature, - generate_pdb_file_feature, - shared_flag_feature, - linkstamps_feature, - output_execpath_flags_feature, - archiver_flags_feature, - input_param_flags_feature, - linker_subsystem_flag_feature, - user_link_flags_feature, - default_link_flags_feature, - linker_param_file_feature, - static_link_msvcrt_feature, - static_link_msvcrt_no_debug_feature, - dynamic_link_msvcrt_no_debug_feature, - static_link_msvcrt_debug_feature, - dynamic_link_msvcrt_debug_feature, - dbg_feature, - fastbuild_feature, - opt_feature, - user_compile_flags_feature, - sysroot_feature, - unfiltered_compile_flags_feature, - compiler_output_flags_feature, - compiler_input_flags_feature, - def_file_feature, - windows_export_all_symbols_feature, - no_windows_export_all_symbols_feature, - supports_dynamic_linker_feature, - supports_interface_shared_libraries_feature, - ] - else: - fail("Unreachable") - - cxx_builtin_include_directories = ctx.attr.builtin_include_directories - - if (ctx.attr.cpu == "x64_windows"): - tool_paths = [ - tool_path(name = "ar", path = ctx.attr.msvc_lib_path), - tool_path(name = "ml", 
path = ctx.attr.msvc_ml_path), - tool_path(name = "cpp", path = ctx.attr.msvc_cl_path), - tool_path(name = "gcc", path = ctx.attr.msvc_cl_path), - tool_path(name = "gcov", path = "wrapper/bin/msvc_nop.bat"), - tool_path(name = "ld", path = ctx.attr.msvc_link_path), - tool_path(name = "nm", path = "wrapper/bin/msvc_nop.bat"), - tool_path( - name = "objcopy", - path = "wrapper/bin/msvc_nop.bat", - ), - tool_path( - name = "objdump", - path = "wrapper/bin/msvc_nop.bat", - ), - tool_path( - name = "strip", - path = "wrapper/bin/msvc_nop.bat", - ), - ] - elif (ctx.attr.cpu == "local"): - tool_paths = [ - tool_path(name = "gcc", path = ctx.attr.host_compiler_path), - tool_path(name = "ar", path = ctx.attr.host_compiler_prefix + "/ar"), - tool_path(name = "compat-ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "cpp", path = ctx.attr.host_compiler_prefix + "/cpp"), - tool_path(name = "dwp", path = ctx.attr.host_compiler_prefix + "/dwp"), - tool_path(name = "gcov", path = ctx.attr.host_compiler_prefix + "/gcov"), - tool_path(name = "ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "nm", path = ctx.attr.host_compiler_prefix + "/nm"), - tool_path(name = "objcopy", path = ctx.attr.host_compiler_prefix + "/objcopy"), - tool_path(name = "objdump", path = ctx.attr.host_compiler_prefix + "/objdump"), - tool_path(name = "strip", path = ctx.attr.host_compiler_prefix + "/strip"), - ] - elif (ctx.attr.cpu == "darwin"): - tool_paths = [ - tool_path(name = "gcc", path = ctx.attr.host_compiler_path), - tool_path(name = "ar", path = ctx.attr.host_compiler_prefix + "/libtool"), - tool_path(name = "compat-ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "cpp", path = ctx.attr.host_compiler_prefix + "/cpp"), - tool_path(name = "dwp", path = ctx.attr.host_compiler_prefix + "/dwp"), - tool_path(name = "gcov", path = ctx.attr.host_compiler_prefix + "/gcov"), - tool_path(name = "ld", path = ctx.attr.host_compiler_prefix + "/ld"), - tool_path(name = "nm", path = ctx.attr.host_compiler_prefix + "/nm"), - tool_path(name = "objcopy", path = ctx.attr.host_compiler_prefix + "/objcopy"), - tool_path(name = "objdump", path = ctx.attr.host_compiler_prefix + "/objdump"), - tool_path(name = "strip", path = ctx.attr.host_compiler_prefix + "/strip"), - ] - else: - fail("Unreachable") - - out = ctx.actions.declare_file(ctx.label.name) - ctx.actions.write(out, "Fake executable") - return [ - cc_common.create_cc_toolchain_config_info( - ctx = ctx, - features = features, - action_configs = action_configs, - artifact_name_patterns = [], - cxx_builtin_include_directories = cxx_builtin_include_directories, - toolchain_identifier = toolchain_identifier, - host_system_name = host_system_name, - target_system_name = target_system_name, - target_cpu = target_cpu, - target_libc = target_libc, - compiler = compiler, - abi_version = abi_version, - abi_libc_version = abi_libc_version, - tool_paths = tool_paths, - make_variables = [], - builtin_sysroot = builtin_sysroot, - cc_target_os = cc_target_os, - ), - DefaultInfo( - executable = out, - ), - ] - -cc_toolchain_config = rule( - attrs = { - "cpu": attr.string( - mandatory = True, - values = [ - "darwin", - "local", - "x64_windows", - ], - ), - "builtin_include_directories": attr.string_list(), - "extra_no_canonical_prefixes_flags": attr.string_list(), - "host_compiler_path": attr.string(), - "host_compiler_prefix": attr.string(), - "host_compiler_warnings": attr.string_list(), - "host_unfiltered_compile_flags": 
attr.string_list(), - "linker_bin_path": attr.string(), - "msvc_cl_path": attr.string(default = "msvc_not_used"), - "msvc_env_include": attr.string(default = "msvc_not_used"), - "msvc_env_lib": attr.string(default = "msvc_not_used"), - "msvc_env_path": attr.string(default = "msvc_not_used"), - "msvc_env_tmp": attr.string(default = "msvc_not_used"), - "msvc_lib_path": attr.string(default = "msvc_not_used"), - "msvc_link_path": attr.string(default = "msvc_not_used"), - "msvc_ml_path": attr.string(default = "msvc_not_used"), - }, - executable = True, - provides = [CcToolchainConfigInfo], - implementation = _impl, -) diff --git a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc b/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc deleted file mode 100755 index 5c0abcdcd8..0000000000 --- a/build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0/clang/bin/crosstool_wrapper_driver_is_not_gcc +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Crosstool wrapper for compiling CUDA programs. - -SYNOPSIS: - crosstool_wrapper_is_not_gcc [options passed in by cc_library() - or cc_binary() rule] - -DESCRIPTION: - This script is expected to be called by the cc_library() or cc_binary() bazel - rules. When the option "-x cuda" is present in the list of arguments passed - to this script, it invokes the nvcc CUDA compiler. Most arguments are passed - as is as a string to --compiler-options of nvcc. When "-x cuda" is not - present, this wrapper invokes hybrid_driver_is_not_gcc with the input - arguments as is. -""" - -from __future__ import print_function - -__author__ = 'keveman@google.com (Manjunath Kudlur)' - -from argparse import ArgumentParser -import os -import subprocess -import re -import sys -import pipes - -# Template values set by cuda_autoconf. -CPU_COMPILER = ('/dt7/usr/bin/gcc') -GCC_HOST_COMPILER_PATH = ('/dt7/usr/bin/gcc') - -NVCC_PATH = '/usr/local/cuda-10.0/bin/nvcc' -PREFIX_DIR = os.path.dirname(GCC_HOST_COMPILER_PATH) -NVCC_VERSION = '10.0' - - -def Log(s): - print('gpus/crosstool: {0}'.format(s)) - - -def GetOptionValue(argv, option): - """Extract the list of values for option from the argv list. - - Args: - argv: A list of strings, possibly the argv passed to main(). - option: The option whose value to extract, without the leading '-'. - - Returns: - A list of values, either directly following the option, - (eg., -opt val1 val2) or values collected from multiple occurrences of - the option (eg., -opt val1 -opt val2). 
- """ - - parser = ArgumentParser() - parser.add_argument('-' + option, nargs='*', action='append') - args, _ = parser.parse_known_args(argv) - if not args or not vars(args)[option]: - return [] - else: - return sum(vars(args)[option], []) - - -def GetHostCompilerOptions(argv): - """Collect the -isystem, -iquote, and --sysroot option values from argv. - - Args: - argv: A list of strings, possibly the argv passed to main(). - - Returns: - The string that can be used as the --compiler-options to nvcc. - """ - - parser = ArgumentParser() - parser.add_argument('-isystem', nargs='*', action='append') - parser.add_argument('-iquote', nargs='*', action='append') - parser.add_argument('--sysroot', nargs=1) - parser.add_argument('-g', nargs='*', action='append') - parser.add_argument('-fno-canonical-system-headers', action='store_true') - parser.add_argument('-no-canonical-prefixes', action='store_true') - - args, _ = parser.parse_known_args(argv) - - opts = '' - - if args.isystem: - opts += ' -isystem ' + ' -isystem '.join(sum(args.isystem, [])) - if args.iquote: - opts += ' -iquote ' + ' -iquote '.join(sum(args.iquote, [])) - if args.g: - opts += ' -g' + ' -g'.join(sum(args.g, [])) - if args.fno_canonical_system_headers: - opts += ' -fno-canonical-system-headers' - if args.no_canonical_prefixes: - opts += ' -no-canonical-prefixes' - if args.sysroot: - opts += ' --sysroot ' + args.sysroot[0] - - return opts - - -def _update_options(nvcc_options): - if NVCC_VERSION in ("7.0",): - return nvcc_options - - update_options = {"relaxed-constexpr": "expt-relaxed-constexpr"} - return [ - update_options[opt] if opt in update_options else opt - for opt in nvcc_options - ] - - -def GetNvccOptions(argv): - """Collect the -nvcc_options values from argv. - - Args: - argv: A list of strings, possibly the argv passed to main(). - - Returns: - The string that can be passed directly to nvcc. - """ - - parser = ArgumentParser() - parser.add_argument('-nvcc_options', nargs='*', action='append') - - args, _ = parser.parse_known_args(argv) - - if args.nvcc_options: - options = _update_options(sum(args.nvcc_options, [])) - return ' '.join(['--' + a for a in options]) - return '' - - -def InvokeNvcc(argv, log=False): - """Call nvcc with arguments assembled from argv. - - Args: - argv: A list of strings, possibly the argv passed to main(). - log: True if logging is requested. - - Returns: - The return value of calling os.system('nvcc ' + args) - """ - - host_compiler_options = GetHostCompilerOptions(argv) - nvcc_compiler_options = GetNvccOptions(argv) - opt_option = GetOptionValue(argv, 'O') - m_options = GetOptionValue(argv, 'm') - m_options = ''.join([' -m' + m for m in m_options if m in ['32', '64']]) - include_options = GetOptionValue(argv, 'I') - out_file = GetOptionValue(argv, 'o') - depfiles = GetOptionValue(argv, 'MF') - defines = GetOptionValue(argv, 'D') - defines = ''.join([' -D' + define for define in defines]) - undefines = GetOptionValue(argv, 'U') - undefines = ''.join([' -U' + define for define in undefines]) - std_options = GetOptionValue(argv, 'std') - # currently only c++11 is supported by Cuda 7.0 std argument - nvcc_allowed_std_options = ["c++11"] - std_options = ''.join([ - ' -std=' + define for define in std_options - if define in nvcc_allowed_std_options - ]) - - # The list of source files get passed after the -c option. I don't know of - # any other reliable way to just get the list of source files to be compiled. 
- src_files = GetOptionValue(argv, 'c') - - # Pass -w through from host to nvcc, but don't do anything fancier with - # warnings-related flags, since they're not necessarily the same across - # compilers. - warning_options = ' -w' if '-w' in argv else '' - - if len(src_files) == 0: - return 1 - if len(out_file) != 1: - return 1 - - opt = (' -O2' if - (len(opt_option) > 0 and int(opt_option[0]) > 0) else ' -g -G') - - includes = (' -I ' + ' -I '.join(include_options) - if len(include_options) > 0 else '') - - # Unfortunately, there are other options that have -c prefix too. - # So allowing only those look like C/C++ files. - src_files = [ - f for f in src_files if re.search('\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f) - ] - srcs = ' '.join(src_files) - out = ' -o ' + out_file[0] - - supported_cuda_compute_capabilities = ["3.0", "6.0"] - nvccopts = '-D_FORCE_INLINES ' - for capability in supported_cuda_compute_capabilities: - capability = capability.replace('.', '') - nvccopts += r'-gencode=arch=compute_%s,\"code=sm_%s,compute_%s\" ' % ( - capability, capability, capability) - nvccopts += ' ' + nvcc_compiler_options - nvccopts += undefines - nvccopts += defines - nvccopts += std_options - nvccopts += m_options - nvccopts += warning_options - - if depfiles: - # Generate the dependency file - depfile = depfiles[0] - cmd = (NVCC_PATH + ' ' + nvccopts + ' --compiler-options "' + - host_compiler_options + '"' + ' --compiler-bindir=' + - GCC_HOST_COMPILER_PATH + ' -I .' + ' -x cu ' + opt + includes + - ' ' + srcs + ' -M -o ' + depfile) - if log: Log(cmd) - exit_status = os.system(cmd) - if exit_status != 0: - return exit_status - - cmd = (NVCC_PATH + ' ' + nvccopts + ' --compiler-options "' + - host_compiler_options + ' -fPIC"' + ' --compiler-bindir=' + - GCC_HOST_COMPILER_PATH + ' -I .' + ' -x cu ' + opt + includes + - ' -c ' + srcs + out) - - # TODO(zhengxq): for some reason, 'gcc' needs this help to find 'as'. - # Need to investigate and fix. - cmd = 'PATH=' + PREFIX_DIR + ':$PATH ' + cmd - if log: Log(cmd) - return os.system(cmd) - - -def main(): - parser = ArgumentParser() - parser.add_argument('-x', nargs=1) - parser.add_argument('--cuda_log', action='store_true') - args, leftover = parser.parse_known_args(sys.argv[1:]) - - if args.x and args.x[0] == 'cuda': - if args.cuda_log: Log('-x cuda') - leftover = [pipes.quote(s) for s in leftover] - if args.cuda_log: Log('using nvcc') - return InvokeNvcc(leftover, log=args.cuda_log) - - # Strip our flags before passing through to the CPU compiler for files which - # are not -x cuda. We can't just pass 'leftover' because it also strips -x. - # We not only want to pass -x to the CPU compiler, but also keep it in its - # relative location in the argv list (the compiler is actually sensitive to - # this). 
- cpu_compiler_flags = [ - flag for flag in sys.argv[1:] if not flag.startswith(('--cuda_log')) - ] - - return subprocess.call([CPU_COMPILER] + cpu_compiler_flags) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build_pip_pkg.sh b/build_pip_pkg.sh index 82e1c78754..9ef2861aae 100755 --- a/build_pip_pkg.sh +++ b/build_pip_pkg.sh @@ -44,7 +44,6 @@ function main() { cp ${PIP_FILE_PREFIX}setup.py "${TMPDIR}" cp ${PIP_FILE_PREFIX}MANIFEST.in "${TMPDIR}" cp ${PIP_FILE_PREFIX}LICENSE "${TMPDIR}" - touch ${TMPDIR}/stub.cc rsync -avm -L --exclude='*_test.py' ${PIP_FILE_PREFIX}tensorflow_addons "${TMPDIR}" pushd ${TMPDIR} diff --git a/configure.sh b/configure.sh index a23e051365..0624a47f86 100755 --- a/configure.sh +++ b/configure.sh @@ -48,7 +48,7 @@ elif [[ ! -z "$1" ]]; then fi # Install python dependencies -read -r -p "Tensorflow 2.0 will be installed if it is not already. Are You Sure? [y/n] " reply +read -r -p "Tensorflow will be upgraded to 2.0. Are You Sure? [y/n] " reply case $reply in [yY]*) echo "Installing...";; * ) echo "Goodbye!"; exit;; @@ -70,26 +70,26 @@ TF_CFLAGS=( $(${PYTHON_VERSION} -c 'import tensorflow as tf; print(" ".join(tf.s TF_LFLAGS=( $(${PYTHON_VERSION} -c 'import tensorflow as tf; print(" ".join(tf.sysconfig.get_link_flags()))') ) TF_CXX11_ABI_FLAG=( $(${PYTHON_VERSION} -c 'import tensorflow as tf; print(tf.sysconfig.CXX11_ABI_FLAG)') ) -TF_SHARED_LIBRARY_DIR=${TF_LFLAGS[0]:2} -TF_SHARED_LIBRARY_NAME=$(generate_shared_lib_name ${TF_LFLAGS[1]}) +SHARED_LIBRARY_DIR=${TF_LFLAGS[0]:2} +SHARED_LIBRARY_NAME=$(generate_shared_lib_name ${TF_LFLAGS[1]}) write_action_env_to_bazelrc "TF_HEADER_DIR" ${TF_CFLAGS:2} -write_action_env_to_bazelrc "TF_SHARED_LIBRARY_DIR" ${TF_SHARED_LIBRARY_DIR} -write_action_env_to_bazelrc "TF_SHARED_LIBRARY_NAME" ${TF_SHARED_LIBRARY_NAME} +write_action_env_to_bazelrc "TF_SHARED_LIBRARY_DIR" ${SHARED_LIBRARY_DIR} +write_action_env_to_bazelrc "TF_SHARED_LIBRARY_NAME" ${SHARED_LIBRARY_NAME} write_action_env_to_bazelrc "TF_CXX11_ABI_FLAG" ${TF_CXX11_ABI_FLAG} - if [[ "$TF_NEED_CUDA" == "1" ]]; then - write_action_env_to_bazelrc "TF_NEED_CUDA" ${TF_NEED_CUDA} write_action_env_to_bazelrc "CUDNN_INSTALL_PATH" "/usr/lib/x86_64-linux-gnu" write_action_env_to_bazelrc "TF_CUDA_VERSION" "10.0" write_action_env_to_bazelrc "TF_CUDNN_VERSION" "7" write_action_env_to_bazelrc "CUDA_TOOLKIT_PATH" "${CUDA_HOME:=/usr/local/cuda}" - - write_to_bazelrc "test --config=cuda" write_to_bazelrc "build --config=cuda" - write_to_bazelrc "build --spawn_strategy=local" - write_to_bazelrc "build --strategy=Genrule=local" + write_to_bazelrc "test --config=cuda" + write_to_bazelrc "build:cuda --define=using_cuda=true --define=using_cuda_nvcc=true" + write_to_bazelrc "build:cuda --crosstool_top=@local_config_cuda//crosstool:toolchain" + write_to_bazelrc "build --spawn_strategy=standalone" + write_to_bazelrc "build --strategy=Genrule=standalone" + write_action_env_to_bazelrc "TF_NEED_CUDA" ${TF_NEED_CUDA} fi diff --git a/examples/image_ops.ipynb b/examples/image_ops.ipynb deleted file mode 100644 index 51e1a93908..0000000000 --- a/examples/image_ops.ipynb +++ /dev/null @@ -1,601 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "image_ops.ipynb", - "version": "0.3.2", - "provenance": [], - "collapsed_sections": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "GWEKvPCCxJke", - "colab_type": "text" - }, - 
"source": [ - "##### Copyright 2019 The TensorFlow Authors." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "l-m8KQ-nxK5l", - "colab_type": "code", - "colab": {} - }, - "source": [ - "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# https://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "O8FuVCLYxi_l", - "colab_type": "text" - }, - "source": [ - "# TensorFlow Addons Image: Operations\n", - "\n", - "\n", - " \n", - " \n", - "
\n", - " Run in Google Colab\n", - " \n", - " View source on GitHub\n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "2a5ksOt-xsOl", - "colab_type": "text" - }, - "source": [ - "# Overview\n", - "This notebook will demonstrate how to use the some image operations in TensorFlow Addons.\n", - "\n", - "Here is the list of image operations we'll be covering in this example:\n", - "\n", - "- tfa.image.mean_filter2d\n", - "\n", - "- tfa.image.rotate\n", - "\n", - "- tfa.image.transform\n", - "\n", - "- tfa.image.random_hsv_in_yiq\n", - "\n", - "- tfa.image.adjust_hsv_in_yiq\n", - "\n", - "- tfa.image.dense_image_warp\n", - "\n", - "- tfa.image.euclidean_dist_transform" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "DMbjxr4PyMPF", - "colab_type": "text" - }, - "source": [ - "# Setup" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "2ZdFry6yAp-c", - "colab_type": "code", - "outputId": "0be2c0d6-ea06-48bc-97ef-06ccffabbf0a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 85 - } - }, - "source": [ - "!pip install -q tensorflow-gpu==2.0.0rc0\n", - "!pip install -q tensorflow-addons~=0.5\n", - "\n", - "from __future__ import absolute_import, division, print_function, unicode_literals\n", - "\n", - "import numpy as np\n", - "import tensorflow as tf\n", - "import tensorflow_addons as tfa\n", - "\n", - "import matplotlib.pyplot as plt" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "\u001b[K |████████████████████████████████| 348.9MB 69kB/s \n", - "\u001b[K |████████████████████████████████| 3.1MB 30.6MB/s \n", - "\u001b[K |████████████████████████████████| 501kB 37.4MB/s \n", - "\u001b[K |████████████████████████████████| 552kB 2.7MB/s \n", - "\u001b[?25h" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Q6Z2rsP8yp2v", - "colab_type": "text" - }, - "source": [ - "# Prepare and Inspect Images" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "9gbgJP10z9WO", - "colab_type": "text" - }, - "source": [ - "## Download the images" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "IgUsVhBQ6dSg", - "colab_type": "code", - "colab": {} - }, - "source": [ - "!wget -q https://i.dailymail.co.uk/i/pix/2015/09/01/18/2BE1E88B00000578-3218613-image-m-5_1441127035222.jpg -O google.jpg\n", - "!wget -q https://i.stack.imgur.com/nm2HM.png -O xray.png" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "uheQOL-y0Fj3", - "colab_type": "text" - }, - "source": [ - "## Inspect the images" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "MFGirRRZ0Y9k", - "colab_type": "text" - }, - "source": [ - "### Google Icon" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "NRlvNQdm1YI8", - "colab_type": "code", - "outputId": "3997165d-46b0-4ac6-adaf-22bfd94efeb8", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 281 - } - }, - "source": [ - "google_img_raw = tf.io.read_file(\"google.jpg\")\n", - "google_img = tf.io.decode_image(google_img_raw)\n", - "\n", - "plt.title(\"Google Icon with shape {}\".format(google_img.shape))\n", - "_ = plt.imshow(google_img)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAUsAAAEICAYAAADWe9ZcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXl8JUd1sP2cquq+92oZzYxnvO8Y\ngzEYMAYTVhOWsIRAAklIwCwBHBKyvAm8QBISSELy8X1vli8bJCyJMQQDIRhI2BIgDhjCvpjF2GBs\nM2N7POPZJV317a467x9VV7qSpRnNWBpJVj3z61Hf7rrddau7T586deocUVUymUwmc2jMSlcgk8lk\n1gJZWGYymcwiyMIyk8lkFkEWlplMJrMIsrDMZDKZRZCFZSaTySyCLCxXCSLyehF510rXYykRkUeL\nyPWH2H+miKiIuLt5nktEZPvdOcZSISItEfmuiJy00nVZjYjIBSLy+ZWux9GQheUcROQ5IvJFEZkQ\nkZ1p/VdFRFa6bgshIjeLyBNWuh5zUdXPqup9+p9Xaz2XmMuAz6jq7YMbRaQUkesGhXp6mYzPWVRE\nnpX2t0TkL0XkNhHZKyJvEpFisRURkaH0nTtFZL+IfGZg32+JyA9F5EA6/l/O99ISkcemOr3hCM77\nLhG5PR37BhF5SX+fql4L7BORpy/2eKuFLCwHEJFXAH8F/B/gROAE4GXAI4FyBauWWTu8DHjnPNv/\nN7BrcEN6mYz0F+AngXHg46nIa4CLgPsD5wIXAq89grq8BdgMnJf+/tbAvg8DF6rqhnT8BwK/Mfjl\nJJj/CvjiEZwT4P8BzkzH/ingDSLykIH9/wz88hEec8XJwjIhImPAHwG/qqrvV9WDGvm6qj5XVat+\nORG5QkR2icgtIvJaETFpn0mfb0la6RXpuP1zPD/t2y0iv38oTUtEHi4inxeRfSLyTRG55Ah+y0uT\nFnMwdQkvTNvPE5Gr0zG/IyI/NfCdy0Xk70TkI+l7XxSRey1w/HekFwsickrSPF6ePt9LRPaktpju\nHovIO4HTgX9LGtSrBg75XBH5UdKAfu8Qv+up6fccFJFbReSVc/a/IrX77SLyooHtTxORrydNZ5uI\nvH5gX98UcFnSsG4fPG76Ha8RkRvTdXufiGxeoH6nA2czR7iIyFnA84hC5FC8AHi/qk6kz08H/lpV\n96jqLuCvgV86zDH657wvUVBdpqq7VNWr6lf7+1X1RlXd1y8OBOCcOYd5BfAfwPcWc86BY3+n/7wA\nmpbBe+lq4PEi0jqS4644qpqXOOXzyUADuMOUuwL4EDAKnAncALw47fsl4AfEB2YE+ADwzrTvfkSt\n4VFELfXPgBp4Qtr/euBdaf0UYDfwVOIL7Ynp89YF6nTzwHF+FrgVeCjxITgHOAMoUt1+N53/x4GD\nwH3S9y5P53gY4Ihv//cscL5fAv4trf8icCPw3oF9H0rrlwDb56tn+nwm8UF6K9AhajcVcN4C570d\neHRa30TUjPrnaYgvuyK12ySwaWD/A1JbXgDcATxzTh2uBIZTuV0D7fmbwBeAU4EW8A/AlQvU72nA\nd+bZ/u/AT89tjzllhtP1uGRg21eAnxv4/NxU17FF3M/PB74F/CVwZ1p/1pwyvwgcSMfcBTxwYN8Z\nxHt7JN0bbzjC5+lN6Roo8DVgZM7+A8AFK/3cH9FvWukKrJaF+ObfMWfb54F9QBd4DGCBHnC/gTK/\nDFyd1j9F1Ez7++5DFIgO+IPBhwwYSseaT1i+miRkB8p/AnjBAnW/eeA4nwB+c54yjwZ2AGZg25XA\n69P65cDbBvY9FfjeAue7F7CXKHz+PrXB9rTvHcBvp/VZwoGFheWpA9u+BDxngfP+KJ1rw5ztl6Rr\n5Aa27QQevsBx/n/gL+fU4b4D+/8/4O1p/Trg8QP7Tupf03mO+1zgC3O2/TTwsfnaY065S4GbABnY\n9gbgc8BWolnoi6muJy3ifv7dVPb1xJfjY4kv67u8iIB7A38MnDiw7UPAzw/cG0ckLNP3LFE5eC1Q\nzNl3K/CYpXp+j8WSu+Ez7Aa2DBq5VfURqrox7TPAFqLmcsvA924haoIAJ8+zzxFtnycD2waOPZmO\nOx9nAD+busv7RGQf8aZbzAjraURNby4nA9tUNSxQd4jCtM8kUau4C6p6IzABPIgohP8duE1E7kN8\nKP97EfUcZFHnBZ5FFOK3iMh/i8iPDezbrarNfMcRkYtF5L+S6WQ/0a64Zc6xtw2s30JsL4jX4qqB\n63Ad4InXdC57iT0O0nmHiYL3N+YpO5cXAFdokiSJPwG+DnyD+OL+IFFQ37GI43VT2Teoak9V/xv4\nL+BJcwuq6veB7xC1QdLgy6iqvncR51kQjV3/a4ha+a/M2T1KVETWDFlYzvA/xC7gMw5R5k7iDXjG\nwLbTiW9JgNvm2dcQb+7biTcNACLSAY5b4DzbiJrlxoFlWFXfuIjfsY3Z9qE+twGn9e2r89T9SPlv\n4NlAqaq3ps8vIHaPv7HAd+5WiCtV/bKqPgM4nig43rfIr76bOKBxmqqOEbXhud4Npw2sn05sL4jt\n+ZQ516KdfvNcrgXOGnjh3puouX5WRHYQzTInicgOETmz/yUROY2odV4x5/d2VfXXVPUUVT2b+HL9\n6pwX3kJcO8+2Q7W/Y+a+eTxwUarnDuDngf8lIh9axHkPd2xE5BSitrugW9lqJAvLhEZj9x8CbxKR\nZ4vIaDLuP4hoT0JVPfEB/ZO0/wzgt4G+f+SVwG+JyFkiMgL8KdGW1wDvB54uIo8QkZLYPVrIHeld\nqexPiIgVkXYaLDl1gfKDvA14pYg8RCLnpHp+kahtvUpEijRg9HTgPUfWUtP8N/BrQN8d5er0+ZrU\nTvNxB9Gee8RIdL15roiMqWpNtHktRmhA1GL2qOqUiDyMaKuby+9LdLU5H3gR0Neq/p54vc9I9dgq\nIvO+UFV1O9Eu/LC06dtEIfygtLyE2AYPYrYmeynw+aSxD/7mU0Tk5HQdHw78PvC6gf2Xi8jlC/zm\nzxDNFr8jIk5EHgk8jmimQUReIiLHp/X7Ab9DNCORznPuQL0/TLQrvyiVv0RE5hW8InK8RPe7kXTv\n/gTwCwPHhtj7+LTODAKtDVbaDrDaFqLd6UtEwbKLKGQuI2pQEDWnd6V924i2SJP2mfR5W9r/LtIg\nQ9r/QuINvJt4Q97KzIDF60k2y/T5YqJA2pOO9RHg9AXqfDOzbYEvI761x4kP7IPT9vPTMfcD3wV+\neuA7lzNgl+IQ9rW0/z5ETeUF6fMYUYt+9ULHIGrtPyJ2v17JjL1w0NZ4NfCSec5XEl1q9hIF5ZeB\nRy1UV2bbcZ9N7FofJJoM/pYZ+3C/DpcRtckdwKsGjmOIL8Tr0/dvBP70EO3ycuDNC+ybt02Jo80v\nnmf7Y9LvmEznf+6c/Z8CXnqIupxP7DFNzHO9/4kouCfS
Of4P0F7gOHPvjUuBzy1Qdmu6x/al6/St\nuXUk3ss/tdLP+pEukiqfOcYkzXMfcG9VvWml67NeSd3hm4gDEM2hSy/qeC2infHxOscxfSlJvZNv\nEkeU6+U6zwLnfhvwL6r6iaP47gXAP6jqjx228CojC8tjSDKcf4rY/f5zovZ4oeaLsGIstbDM3HNZ\nFpuliDxZRK4XkR+IyGuW4xxrlGcQu3q3EY3/z8mCMpNZGyy5ZikilujM+kRgO9G29Auq+t0lPVEm\nk8kcQ5ZDs3wY8ANV/aGq9oijrYdyx8lkMplVz90KjbUApzDbLWI70Ta3IFu2bNEzzzxzGaqSyWQy\nh+arX/3qnaq69XDllkNYLgoRuYzorsHpp5/OV77ylZWqSiaTWceIyC2HL7U83fBbmT0b4lTmmSWi\nqm9R1YtU9aKtWw8r1DOZTGZFWQ5h+WXg3mkWSwk8hzgDIJPJZNYsS94NV9VGRH6NOK3KAv+oqt9Z\n6vNkMpnMsWRZbJaq+lHgo8tx7Ewmk1kJciCNTCaTWQQrNhqeyRwxg/MnVm36uMw9lSwsM2uHBQSk\nLrwrk1kysrDMrBnyJPrMSpJtlplMJrMIsmaZWVMEQJKOKch093tuQBiR3DHPLC1ZWGbWFH1Bafpi\nck7fPAvJzHKRhWVmzaDobCE5rU1KFJJZTmaWkSwsMwuymFFmJXaNDSBzvjAjygDCzGcdMJUPyD4B\n0CYdVMEHCAE0AIrxPp6k8dA00OtBncqXDoyCK6EowFlQEw8qBkTiX9NfBIwZFL+J/q/pVyqkVTNY\nXebmSpu1f/YPz9xDyMIyMy93EYIAMlsONDQIHpvyenkpMAoNgUJAgoEe6S4ziEIQqI1Smgb8BNQT\ncHACed/7uO6zn2N8535oPKUVNrYdvqkorFAUFuMFCbEGFsHXDQawYgjaAxGCJBnrHKYoQYSqqXGF\n58BEl1odE72GpmzjhkZ5+BN/Ah79aDjzDGi1odOmMQUWQfD4BoxzoIpgpxtAjZl+OTSpPWwApKGR\nGg+06Cz/hcocM1ZFDp6LLrpIc4i21cd8mmVfiFoq0ALE0BCDAIjWID0IJTQeLQr2h4q2LWmrwkSA\nqXGm3v9Wvv6pz6A77mCTWIYMdDpC1Z1kyBlUFSuOgOJVwQhqhKZWClNgTNTi1IfpzHsuKYNBIKgS\nxKCAcRZrLZN1hfGKRXAqeK+0i5IKGHfg65pu5alcm4deeinmSU+GjZtAOjDUImiFsWb613vMLHOp\nxSAhRE1UFA9YimW8OpmlQkS+qqoXHbZcFpaZBZnTnZw9gSbpnQoq/U5pjaFBtBO3hUnsvl3w+c/x\n5cvfwvjtP6IlgdNbYxhnMcZR9xRnCgjgez1arsGKQ8Ti64CxJcYYQoBGekCYFpZew/SAjvqZdWMM\ndVPhigI1gsVCbajrmqJwTNVTtFoFKtDzPQoLplFKdQgFk67ggPbYW1fs61Wce8njOPMPXo+6DnU5\nggWs1vGHC0n17rdHMd0meaxpbZCFZebuM4/tLQqcAk9MhGgx0YZoTbQjBgvNdvjs1Vzzt//Iaft6\njJQGMR4JPmqC0sYFRRRUAt4EKhsFjAtR7AYxBBNADSGdv/QDJgFi+T4y9zaWMGfDzGedI8S8mGjf\nTKYCIz2EGoOnkQ5BHPu6XXY3yhmPeAyn/for4bjjoT0c28bENvKABo8Te5d2y6xeFisss80yszCD\ngzWqCIIzlqZX48o4gKICB/1BNiD03v5OvvDuKzm59AxbuE8AU3iMBhShsgaPoqbGBShCwGjUUlsB\nvEAjLp06YELAqscQEAUvBQvNo+gLwLsIzVlEodgfYDIahWNLwZuGnqsINhDU4HwHgqMdGlQDnaLF\nyS1H92tf5paXPY/rdmzjya98LfzkM2B0hEqGsYDDglQ0BFy2Wd6jyJplZkHmxq1QH6DxiCugexC2\nb+NLv/c7bLp9GxuKgHYMU/snGKJN0W4xRY1pFYTGY4MDDEbBiI+C0QZUAoIiGiiCwfgWKlFwztYc\nw100wtla5lxN0swqZ2loDAQMYXAwXgOCB7UYLUFt1HiNBwLie2ANjUsqZBVoly0mpyqGh0p63R5d\nCm5oah70ouez9bmXgh0G28rz49YIuRueOTxpBCf+CbM2992+ZwZ0gKaGiXHYsYPPv+TZnFS02WgL\n6lAjxmAaUA+htGDB9ypMYfD4KLCCxJFzsQTpu9ukrq+mwRPpxX3CdNcYSF322dVfjLAMEo9tpyVk\nLCeEpL0aGlMAPnXdm+k6gaGRImrDUuPQNCpe0CBIMAgeJ1OUoaFuDF+nwwNe/FKOf/bPQ7sNVlDj\nZpT05I50F1ejWdchS9ljSRaWmcOTnss4mt0gKBoC3lgaoKUS3WV8gN4k9cc/zKf+9A+5eKRNbeLA\nS380Os+cYXrgqWka9hvH5IUP50GveRVs3Ao2jYxrAK2Se5KLQlnje8ETcOpBLFlgHjsWKyzzFVnH\nNKbGU+NoAIenQHyJ1dhlFpmCZh/Xv+4VXPOYh9J765u5aHQje90oIkII0XWnLyTWO03T4H0cxDqx\n49jyjav5zk8/neoD74buXtAaxeBNB1DEN+Ab1DRUBJwaVIpprTOzushXZR1jEWJHEqBBfAWFIj7Q\nnqjh2ut598MuZuNnPs75Y45u6BJKYbSuprVJEcF7v8K/ZHXQf2kURcHUgXGGx2tObgf2/cNf8tGH\nXcCd//h3SHUA2yQfTGtQB6C0NZoBckuuXnI3fB0TmAIspilAoLLQ2nEj17zoUk7Rig1NIGgLpSCI\nx9DDakAAnx0pDokKhGSblGR71cIy3nSZDDUPfM8nYWuHhjZOHV7Bptk/lhaS9ZhjRu6GZw5LjzZ1\nrTSMg99H64Mf4Iafv5R72QCTXbq2RWMbkAqjlqDDeDr4bJ5cBIFgPGoCpS8oa0OrF9hohRNs4JPP\neCxTb/o73NRuVGos0BMHdLKgXKXkq7KOaQMtaXD77+Q/f+yh7HrTG9hUdLEHlbbbjNGA8yXOO7zt\nUhcH6bka1eGVrvqqR9TQagraNXg7QWh3qbWL7wnKGOdvarP7qg/w6Uc+Efnal0D2Ybs1LvfDVy25\nL7We2fcjvvyq11B+70Yu2HAcIVQ0pYXQ4Bofg/TQAzGU3uC8AWkwUhM0z3s+FP0AJD1rUAmY2lNK\ngSpUdUVRFwhwwaZhvv+qV3LTyBBPetvbmGqfSEEHu9I/IHMXsma51kmuJz0aeoRpx8gGqAhUxHXP\nBFBFdyEf4MDtfPZpT+HUbds4pTSIRP9BWxsKYzFFtLWF5CAO8WYx6gj5UT4sgdhuRsEGh5GSgMQp\nnUawCLVTalNzQuN5xJ4u//7859KevBlliv4VrHwKbReiD2jTv8aDy+zVzDKRNcs1Tj+IRYkBPI0E\nnDhcCDg8UCfn7tR1PrCDf3vio3nCaWdy7tAGpJpCRWLQByWFWxs8QXLunmdb5vAMOs4PtmFjosO8\nNzDeapBezQP
2CXc861e4KVQ8/L0fgNETcDYgGvDGYTHxgZ3HZtyfQJBZPvJdv8YRZgRcnKCn6PRj\nKWAaqHpQHeT7f/EGvv3kp/K4DcexZ/ftcWRbZNqxPHPs8AKFh3aTuusu0DHK1knPfUrHtT/5dMbf\n+TZsiPPlLU0UhnpXxTLHGj42ZM1yjaMAEuc4WwGP4DFYAx6DCw6KA3z4qY/l4UD7+A57phpcU6C+\nJmAoioKmaVb4l6w/Sg9Oo5apKIyW7Kt7TOyfYsw2VFe+k5Gzz8I98hGoawGpa09Ir0Mz8D85gfoy\nkzXLNY6Q5hMnQWlwuAZCHfB4mJzgP579s5w/vpv28BATUw0FUJSCMQ5jDN77PF1xBejZuASBtnF4\nXzPhPG4I2kUbJHDz636Hb/3v30R6kwhNmpYKda9ibmqLzPKSheVaJ1r/qYixJV0NuIBlnNbOO/jY\nJQ/l/tUEG80m3J6KdmOQNJ0xP2wrhyFQWxgvYcrFXECtxqRueZpj3qsZKwxbr/s2H3rsxTCxG3oe\nNNAuC+axJmeWkdwNX+sIKI6WQiXQKwKje3fzgSdcwoPHHA8d6tApHLtGDbUYTB2wGiNo5Mds5XAh\n4AKoGAIGLylCElDUlmCEUCjqa/xkjwvaI1z/zJ8inH0O5/3N26FwIB6TPROOGVmzXAMoKQo3NRV1\nVCajPxBKhdDQEBMijvb2cs1PPobHnDDEqArGFUxOTtIKFprDBcfNHCtUDCozYeNEQ3p9BXpOaUyI\nydnU0W6NMGwKNqmw6cYb4ZqPQG8c0YKQQul5wjzR4TNLSRaWa4D+iLcgtIAegZ6LO4QCgqMXJuh0\nd/Gpp/0852/eij/Qxbc79Gx8gJw32PwsrRrCwNJHtJ9OOAq+GB0eahsIpomlJXD9H76RH/3Zn8P4\nTqyGGGIvQO6SLy9ZWK4JAioVeAcUOAIFNZOmiY56CkO9Pbz/4ofwAL+fem+X9tBxTFa9la545ihw\nabZUPz+RSiCYuA6w0Rn4/Gf5wq/9JoQDaQDckP0ZlpcsLNcAybU8hisPYNUREIZQeraGiTv5wiN/\nisdsGWGytwtxFi+w2ZYUWdlYc5hZTv/RxuxC/Bvn69ecoiXbd2yHJmBDwBto8uO8rOTWXQMYDIZW\nzKjYz0+DAw/l/p1c84yncc7xw7SkoD00ii8C1BU9rXPHbI2iacDHKvhuhZ1qaFPQTHWR2vBfheHZ\n//I+aHWmn+K2z4/zcpJHw9cAouA89JyC1BiKmEhr3w52PvNneUiry97uFLbYQGgafFkRDJjg7jp9\nMbPqCUlQqoAJMGTb9GplSgrqDcdz8tv+liecfDaeDkoc62uHADli/bKSW3etYKHBYhCkHoeDu/j4\nJU9GWz0mTUHhWkz6Hu3kkmKCofB9f8rMWsJoXPrUCm7TGAc7Le48aStsPpUpjVYZp3HKZGXAS7Za\nLieHfZJE5B9FZKeIfHtg22YR+U8R+X76uyltFxH5axH5gYhcKyIXLmfl75HoPB8lDvAMeRMdhyb2\n88XHPYUfO3kjU02Nrwx1q2BKKirX0G4MNriUNXElfkTmcMzNVNnHELXKmA0Tghik02Z/8GxrlTzk\nz/8GWmMglqqZiHFSXBScNs/vX1YWo3ZcDjx5zrbXAJ9S1XsDn0qfAZ4C3DstlwFvXppqrgNSgITZ\nYbcaagIEg6gFhfLgQT79lKdx/hZH1dvHkB1BnFBUNRukxKmlkTg7x5uGsI5978zAstro50yPYe9i\nLY0aTDBM+IZuNUXbCsPDw0wZ4UZjueSf3svkxhNAoE1Jyw1DGV3LHAYkxxhdTg57H6nqZ4A9czY/\nA3hHWn8H8MyB7Vdo5AvARhE5aakqe09GJeaxbkxyLvaxS1VgUvwtBxM3cvVjL+H+J25iV10z1NqM\nFNnsDFFTm6utzefLuFroR0QPgDfxutc2LmMFdEaGmQhCMJabixEe/6//Cp02Q1qtaL3XM0f7pJ2g\nqren9R3ACWn9FGDbQLntadvtzEFELiNqn5x++ulHWY17Dv0JiDaplT0rlN6Bwn4XGBvfy3ee/AzO\nGjpAe8LgeyU1SjPssfXK1v1YsVDXdTFlVtvMJRtcFJJJw5wxmRg8cSCv3ThG/v7tPHLryWAKeoCJ\nfhCZFeBu91A0BkI84ltRVd+iqhep6kVbt269u9VY89jUFRMEjxBwaepOw1izn0/96gvZ2umw0W1k\nQgQ7ZKmdp5haT0b9gKpH1RNCM72u6uNcz/4y+A0BNasvotLcEW/b87TUQS/gC0fZtPjOGSfBllPA\ntKgIFBhcyHPBV4qjFZZ39LvX6e/OtP1W4LSBcqembZnDEUAweAw1jrYSnyIdZ+ef/w1n/PAGSm0x\nVWxAtaDnGnquR7GOElxpCKAKqnHWSlpHddo2Kaqo78+YTtqaX40d8RR+sh+dvtfgG6UYGsVJhy8d\nv4VH/f07wDp6BEocso6u9WrkaIXlh4EXpPUXAB8a2P78NCr+cGD/QHc9cygsMa0DAxdl6iB84+vs\n+fi/c0JniJ4JBIEiOR87D7VZP50yS8xd03et6X+2yPS8aoNgRQghzKtprhaMzsTpNQobNmykV7SY\nGCq5dngjT3zz26AcpkEpIApKA96sE5vLKuSwT5qIXAlcAmwRke3A64A3Au8TkRcDtwA/l4p/FHgq\n8ANgEnjRMtT5HskkgSEJ0CilNewTZeP4JJ/75V/n7BFlSkaYairGGkOwMby2x+HFxOC/90DmBiQu\nXAFBp9NgzEqFobG8iIARmhADGmvaFsLqaqO+wO9HPJ+oayY7HW7rjvO4D3wQQkDDJNaMIjVgAj2p\nCIAlj3qvBIcVlqr6Cwvsevw8ZRV4+d2t1HrEEANihGoPDI2ysRe46Reezf1GCxozDKFHx1piaAxJ\nWujqHOv1Js5StsERkp6sEusa+mkRjBAaj1GDVYM1LergkbanbuBAVbOz8pRbjue0+53PqY95FDzw\nYhgZiSpZWRAboQAxoAGalKfB96BXQV3DDTfQ+8bX2P7lz3Pj925gtN1iy/AIwyKEbpeWcTFQRa/G\nmQIVaKyg6hH1FNN6vqE2ZsYHUg89aBSPE3A+tgOkeJVJSHY7liFVuhNd6IwwaQvO/vO/5cx73Q/v\nLJaBDBFFPH9JZ+kvVmbRrJ8+3CrHEWIw12ILKpPc9GevZ4MYxI7gXYVdQwGECt+hZz1V0aPdpJFf\nYhSdIgC+jRihlEBddSk3DPEj7bJjcoL7nvRQTrv0FzjlgffjvLENYIfpNYZGChzd1H8VmukoO4Kb\nzjUUZ7uEEmQ4mTOOPwN96OM4+1dew9m9BroHoTpIdeUVXPufH2Po4H62tDq40SEmJibQoIgYhqRF\nxzsmnBBSvctkM1SJQv9QVqz+FFWIrkFBZlLkGAVXVRzEoGJpuYI7ho7jtHPOwUgPtBVfAJlVhayG\nrH4XXXSRfuUrX1npaqwonglst4UvHPaHX+fGX38hm6qSxgsTZZeRNaRV
eFtTNgXWF1ROwXRx2iSz\ngVCIQV2L8Ua4dbLHcQ+8kPv+8R/A8BCTrY04oITkJNmAjeqcDgin6GqVUjD0B8OISdoCiiS9TAD1\nYG3SBDXEDXXyIqgr+NBVfPbd72RoYj8bioatrTZTkz1wBTbEKOa1i5F/Sh+P09gA6g6pXRpi0JMY\nZm0gXiVQ+AZttTFumJtbI1z47n+CcnMK5lywOl3p75mIyFdV9aLDlsvCcpXQTFKHIQqd4BMX3o8H\nbR6iLEdQgYmmoW3Xjp3KakCNJyCYUOLFEsRTaoUQuK7bY+P59+d+r30tnHAilCNRFQR6kqbuEdKk\naEsjSkjCb9BxxkCMJo6A6dJMi1MT0y30k6ondS4AiiQ/t5m5PU6B3kTMcfPhD3DNP7yJc44/CTlQ\n4QSCadCBoWgvgheHCxwyoLJRMx2414TYde/7UuIsQQzf1Q6P/cgHqesS6ZR4Dy2bBeWxJAvLtYYH\nTJcv/uLzud+dt9LVXkostvpskofFFYxPHGDzhiEMsGd8ivam47l++w423+f+3P8f3gxt6BUtChzi\nbex2CkCVTLIGn3wDhNl6Vhj4H+ZomBC/L2FWy9kF21HxBCwtfP9Ye+6ED76bj/39X/CQjaeiU1No\nRzEC1B6jFqFIXfFDXR+TZmQ16GSF1J7R0TGmQqAeGeaHtHnk+98NdiNqk7NTA7MNlpnlJgvLtcbE\nfr562Qs4/bbtNAF8Gzp1jBq1xjCeAAAgAElEQVRUFVMUzdoxL4+rZaRVYLtdrt93gHNf+CKO++WX\nQmcIKFC1BJWYA1uS731MMpSkVey+QhwMkv60B2GWEOnfuVFjnMmm3f9eLBEQLFZj+803JbIhUJIE\nbn+/D7HP3N0J3/ou33rZazh30wiTZpKuabDB4A7jh9DXLA0NTHmMOCpXUg2VnP13b4UTT4LWKA0h\nysfGoK4mOkVl7fJYsVhhuXaewHs63/sCm3fvRaylHAEmPYaKygoSStaShnlWu2FXaPiMNjzzM/8D\n5RCUrWRoVIJWqHGImhgAQgNqA3XqRlv63e0ouVRMGkU3A1sH/BQB4pynmS39aONCErRhet9MS4YY\nVjkkISlhOjmcGLBYtH0S8rAtPOAzV/HZX/0Vxm7ezWmtFqZlqHp+Om3tXGKAjJlutyscjTgmS8PB\nloPT7g3apdFJRIaQBtTVTNKjTQubheWqI1+RY0kAtKFHQxNndscHua64+rdfwRieEAJhKs0UF5tc\na459VY3GMG8qBqcBp3Eec+XivlAZSltSmxpvGnxdE6xAU/O+3Z6tr/sznvmxT9OMjKLtNkj0m1HA\nmhKLoZB+1znaD236NyMuZ+IGmdQdn693Grensv3u+GDh6S/FY8ycIXV977LfYNNotBhAC9hwMo++\n4j1ccNVHuF6GERmmVTqQwIRvKEZaGKkwUqM4vHY4KA0OT+EMU0WAwnKDHeLCt74XbwRsGyft+GJI\n/7VpYbIOsyrJwvIYMlsJkfiA9zzU45zTGSM0NaqKaD8pqkHUILoCWmXKMFhZqOxMjEUXoAiBkVHL\n7noSS5tR18GI54c79lP9zM/wcx//OFx4Eb5sE0wxS8CFNI493c1MQk0wSXiZGcFHf778EZjwFix8\niFt91nfMzKoA0sSR72DAD8OWLTz84++lefpPID1LcIaOGELlqaVF0IKWb7DSxeKZ9AqFYZMb5Zqy\n4Gn/8j4YG0ua80AAuek2cNlcuUrJr7BjSA9oYVJH0MUsZGEnV7/kJZxfexS9y6yVlSIASEyE5YJB\nNArJIFHD3De1lw3t45jqeqwp6UqbH/uvq6BzChQGCqGZHnmeYe29nRVLwBuTKt/BOsfmS1/Mbg93\nXP7PnL1hmPrABAwNM+mExla0QsWQOqqiwEwa/ueEIX7mre8i2E70pcyzcNYca+/eXcO0aGK3Njq2\nQN3j83/8O1y4+3asgqpijGE1DLrFNKwNG6rAaM8kf8M4YDHllKHhTdB4bt1zkPavv5wzPvlRdONZ\n0GlTGUOFw/cn5g38nLDGbrkqDbZYH2b0wFAQyi0MvfzVXPCla3DPew7bikAnHGTETyJq6DFMpVAW\nBWPvuZIfv+IqGB5DywI1wyv8qzJHw9q6c9c6Gnu3WsdAv4RJNn3x20yWvRjTUGRVCMqIQVN2wdoE\nui6OUNupQGcysHtnxTf3By7+j0/gn/QTTLktiDUgDS1raCEMITP2VjmKOH6rgBITh50ERBtCCtBs\nLHRqmDwoFL94KRf8y7/x7YmGUPcoHdihFjIyxvdbG2F0M7iSHslOmmNhrEmysDyGBJE41Fo4qPby\nsec/h+N8Q92YaVeZ1YSooTEw3gpMmIrClpjaMdQ5jn0nncbjP/Fx9MTTMeUYbQ14KmYNsvRNrQO/\nba3dcNLEF9yUgUpiaF6HR00D1AwNjeLDRvT4U3nkxz7NPjtGqSXN/i7XiePid18B7WEqmjjPvGHt\nNUIGyJftmBIQMCDBw4F9nLt3J61Oh3KqtdJVuwuiUHgXhbh4hr3CgXFax5/A53Wci9/1LhjZEMen\neg2Ix6Z4nJrccEgpMpS4CCzoarNqsVH2OwxlCs7cIHigKQTvAr2OYUodbN7KvX/797hhynDjyAiX\nXPkBcENMajMTj9JAz3ZX+ldljoI8wLPk9Gct3xULeBuwVeCDz30elzjBdysoWqBTTI+OSoMXk/J+\nB0R6+GW4VDHvTz9h1owi2K99X7AN1YEhCkJnmC9WE/z4Ve/jwMhGRlVpSY23cRik79Tddw6PadNi\nexiY7dazRmgkOoy7Adck17/EHnDgdIqWOGgsPPUJ3OvMTYydeS/GO6OMSI+WJEEpoKZeQx6zmUGy\nsFxS5guZNiM4JRi6tafznT/k4Q/+HgeuPZFRezwydYDGtRHpYaVC1GKDAZnCiwPtICyDoUvCgCEx\naoRBQDUKUR8CtlBs0eaHuxvOf9+b+PETzgQ3zAaIvpMUM/O1hQGXmMgs5+oj8gFaHbi5L76+o2bc\nCUBb2gOfW4zd/xEAjBA/z/hRglDQziPha5I11ida7Rwm8apC2Qrs3/F2TvzJgtOfO8EOuQH1AUeN\niKXBoBpoNQYXlMbWdJfpleZ8TL8aMNQmDuAEE/Am4AUKCxtGRth+oOb8/7gKTjqHqlGwOcNgZv2R\nheWxRKDkWkZbe6mafXRPuJ3zfnsLezddh/MdCq0QPF5HqK1BRXC+oFy29KdRuPuBGBQmgA3RF3Sy\nd4DeROD+V70bNmzF1wWtoRGmlqk2mcxqJgvLJcfMWQazVyvsvArRHmXTpsJRt77NfX/rPG7nu/RE\nKBlBfY9GahoU65VWVS5LTTUFrFCIXfK6x7A4hmyB8cLQxo18ds8BOO50oIPt9Odmr74BqUxmucnC\ncpmZlfvF7GTHf12BqNBrPEUxBVpQ6fc5/1Wb2LPxNnriKFUp1IK0CLYBtzy6XKCfKjZG9ukkIVk3\nMN4EPn/nXp76yY8Qpjr0AJU
GCLTyCEVmHZKF5TJjjEkCM7D/2ldy3HE1poBmaJJhvwHtNqjxTLZu\n4byX7uKkV1TcVF7HVDmBaMBLQVUsTzfcasBoQDRgCKgqkwb2DRXccdaZPP3qr8DwifiOUCbNWCVL\nysz6JAvLpUYH/wRQg+Dw3VuobvkUVTlOLQHxbfA9nN2IrxroBXzZgeGv8NBXFOze8h28tVgtcM2m\nWSHGkrNm2nb0l1A0ZoaUfpZBa/Ctkm/v28/Ff/FXTEkHrKMgapSSIgGpyQIzs/7IwnJJiaFkIRCT\nEHhokrfJwSsZGe7SqiukKSjUU1uPN+OUzlLaEq0NUz1DM7yPB7xwEzuGv8kUByklpn31JqC2nzih\nxuBj+LCjxIQCCTZlIlS88dw6McHTP/wf9DadRrvVD4br0jIYFSiTWV/ku35JMUShElLM2SKGAK92\n8MOvfwh7CF9JkSmMmQBf0lSb6dUTPOjlx1FcsJNbmcDpGGICgYOoPQihQwhDqJ046tpWhSdIQU8d\nnQZMZXjIH/2/MDZC6f3hD5DJrCOysFxClJS2QHV6gkdjGuj8gDFzK8LC+WxFTIyP66ZAKpqipltv\n58RnjXHuTwt72tdTSBu6JRI61LbBG4+9G4E3vK+ZKoXCG5yU7NrfwCMfxVQoWZGIw5nMKiYLyyVm\nOnZEkjUWuO3Tr2Kk2R9n5SxAYwKeFoQWxk7gbMCWI1S9G9HzvsE5r3bsP+4WghvC1iVFXSChRy1H\n3w1vWUc5uR+KijvHhft85uN4U9B2BWryLJNMZpAsLJeQmIWwSdMASSEXttMKN6U0BYeaimNmkhOG\nIi6qmBA9xqt6N2e9tMvIo7bRtV3a0kWawL5mYW31cJimIbQMpjJsetFzYHQDFg/arLVZiZnMspOF\n5RIjKYgXUmNxcOcnaOl+vK0J0iz8Pd+JMS5NF5WAUoKkgLNagPY4WO9jwxP3cMrP7uZmthGGAkPu\n6OdCel9TdDbzvaZg8wufT48i2hFkYEpPJpMBsrBcWhQIJo2EVxBg93X/TCFCTZWcuufH0EQzocbR\naTVd1FTTc388NUN2hMmpSepzd3L+757MHe5GNtdjR13d2vQ4uLvikqv+DdwwJYA6FEOTVctMZhZZ\nWC41GnBotD+acWy9HXE9QgOHau4gMZiZqMEEg6gg/STXEigcBG1wzmGKhiZczwN/875cW1yN9ByN\n6RG0wXpB1EfttO/2Iw1V0TDlHKpCWzzqK0aL47n+pDEYdRBaEBzeRCd1u2BNM5n1SRaWS4kANgAV\nMSHjTrTeS2g87VYxPeizqEPpQO5rwASTRtujruldTQjf4hGvPoMtv1Gxr96GdSVTRmlcQPGoCkgP\ntMT6klaYom7GmVLLpLV8terxlMvfRq/dmg5wbvAQOKK6ZjLrgSwslwHFxwCx4z9gqGNwro1v7kY8\nSjX0h49QQ1UrxoEva4LeRjV0Hef90VnsaH0Hbyoww3i1WG2w6kEtLhiKUDPUdjTGUHY28Jg3/BEM\nbwTaBGKySYkRapeiGTKZexT5qVhiVCUOzuA58IOPoHVF06vplMMcdXNLiljUjzoucWZN46G2PWpX\ngf6A+79yE+Fe22hQCmkj+KghSrSiKoZ6qkKpCZXChQ9mIhRIA0Zg2vFJkr9oJpOZJgvLJUbE4hXg\nIN1dn4uTA53gFx7bWeSB+3PBY4i00LQwtgBxmMLQ1Aeg2s0Zz2pxsHM9E+EOAoqqQUxNEyoaOwyN\nxRi4acduGNlEy7QojIEQfUKVmB87z/7OZGaTheVSkux8IgFkB+3mFoSASiDczWg90YmomQ58IeqQ\n4OKAEAG0iKPwepDzXjFCed/dVEOT1KUStMYUjl4IjGzcRCMlj/7Vl+HF4WacO7HpPCr5xshk5pJz\n8Cw1HrB7mPrRJxkzXWqpCSKI6SJqj84eqAYVg1Bj+ulmk2gLKFYLggWVKWoJ2O4EJz99DMrAzVfe\nSfn9DZSFw3OASe1wy37DqZc9D6sBBHrJn9OpS4Ez4og4km+PTKZPfhqWGgOOmju+/1FOlG6MCqRR\nG7w7AydReAkBS4rSwWAk9jh6LTEikLH4ooeXWznzeWPU351k8j3H022Noq7FI1/1fHq2Q5mOMT2x\nMWmUQj/a0OpCAdEGL2CDQw1M0aOj5V1srPPp8fO1/noqd7THMkTviNjGVYyUr4A0KG413irLwmGF\npYicBlwBnEBsoreo6l+JyGbgvcCZwM3Az6nqXhER4K+ApwKTwAtV9WvLU/1VhsykgDXhNqQt4JOt\nMZhp0XbUzCdsB7ZJP9UsEKRBFGq6uHP20bv/OPrDk9k+fpCtT3pSKtZPEzFwjOm11dcRD4AVRVMt\nfV1RFBB6HlPO9gxdrJ/oeip31MeSGJeVwUFAYL1N81qMZtkAr1DVr4nIKPBVEflP4IXAp1T1jSLy\nGuA1wKuBpwD3TsvFwJvT33VA3wFnErE7Cd6jRtO0xRUSPmEShku2PtfAF7fxtU9s4fz2KbTWWP5u\n6D+aMi3cnbPUfhwpN+LX2G9ZSxgM0QEjAG69ychpDissVfV24Pa0flBErgNOAZ4BXJKKvQO4migs\nnwFcoaoKfEFENorISek46wAPe7dRuD1x1o4IRgGNcS451mkZpKDbg6HyDqpzR3nmC/8EbwG6KJ01\nJS9nab0CiEGD5UfjMDq2GnXhtUu/rRUgwJgh5q5Xm7rgTGv464UjslmKyJnAg4EvAicMCMAdxG46\nREG6beBr29O2WcJSRC4DLgM4/fTTj7DaqxUD9Nj5zasYkoD6Eu8sRvuDMsfeISfUQimeKV9wmzuL\nszdfhO2OQ3s6ONKawQbAeOIUTuhhkGKUF/zJpzFbH0YIDk3xPdXc9ceZeTSiME8b3FPLHarM3H2q\niohgjMHf+T0+8cYLKHyXQjYCMSuowawrcbloYSkiI8C/Av9LVQ/IwJOmqipyZBPkVPUtwFsALrro\nonuEYq8YRHv48W9TjIBxRRqUEESbFXH0traL02HqoTYnFufTC6dRljYZAO9eDp+VQKdfOKnuDYwz\nxmhjsGFgllSY58U0j83Xzqfp30PLHaqMZXAiQkxeZ4wBD9aWdAMUMmjNTMdag+aco2VRwlJECqKg\n/GdV/UDafEe/ey0iJwE70/ZbgdMGvn5q2rY+kClKcxOEGAINm24wU0FK+HUsCeqoJTA5NcVxD7yU\nYGoqLAKU00b7tcOM9bempAXa4ExBUIdRM61ZzmvumG+AbD2VO0yZucISsXjvaRVjGAOKizJS4pRe\nKLKwHCSNbr8duE5V/2Jg14eBFwBvTH8/NLD910TkPcSBnf3rxV4Z75mKwu2haaAUIZgm+l6mdBPH\nnNLhQwudbEFxUqrnATxtkPLY1+fuIEl7h/SQ1hBqiuAR0+Dlbsy/z0yjkrrhVvE04FzS490cwbj2\neiZ3h8Volo8ELgW+JSLfSNt+lygk3yciLwZuAX4u7fso0W3oB0TXoRctaY1XNQHCQaRo
KACtDIYa\ntAVUBJFjPpLY6zWUZh+ueAS+fBCGCUosfs2qA7HeKi2gxhctCIqoYCimNUtZawbZ1UQ/nXOjFJQ0\ntGgTW74RcE3AuoKaLqV0VrKmx5TFjIZfw8KK9uPnKa/Ay+9mvdYmWuO3f4sSDwpq+4EpwooJJ0uB\nMZaN5zyJOD29xXTu8UxmkRzoQVnYGHBl+lZeX/fQ+vq1y41U7N35XYxqGmlu0syHEF/WKxD6LIRA\n0wQ4+cHJ2Tjm/85kjoTvb4fQv39NnNFjVulMr+UiPzVLSs3+nd+jMJL8AFOUIA2omBUZDS8LQUTB\nnER/auTMksksjIhMmzO++8M9GAEN9Tx2y/VBFpZLSo/CHAAfUDXJIV3SgMTKNLWGHsZZMCfF+JbT\ne9bPTZ65+1x3006CgOvPqE2xCNYTWVguKR4rE6gXNHV6jRK75fHTMa+RUQgeYCuzRy/vEa6tmWPE\nD7ffmdZCSsgHMX3z+nnpZmG5pPQwuo+Aw0gZZ0Bof05tCnt2jBERqtChoRUD+wIzId4ymcVx50HF\nzwTSB5n2S1ixOh1rsrBcUiqG2AWtDYTGgypBlMaszFRHiP5yu/xm3LSpaWJWxKFM5vAE9oybGKxE\nC6wavATsCk3hXSnyE7OkBIwEQuhFw3jfTrmiCW0MaDv+Qeh7i+ULnzkS2u2hOKfiLrfy+rmTcvDf\nJcaZQFCPMT5FJ0hZGVcKdRgznD4Y1lO3KbN0dIaGkflmWR77qqwY6+e1cExQjGlA07S7ftBUYKWa\nWpzFmdGBmZYxHcV66j5l7i4G50qa/sjOrPht60dcZmG5pISUqzuAepTokB4TjK2McPIhULQ3DYzn\nGGKA4kxm8RjbwlpmApWsQ9Gx/n7xMiM6z7zkgTS2x75CTQo87GFWeLNMZpGogLExdomsXy+KLCyX\nmPkDOSgrZysMMYCH+JmqAPnSZ46EHJgkPzFLTk8VocBqC0kpbKOD+srcbEE9vrJ4AqKengREO9Gc\nuiI1yqw5RAdiKc/tnayfMeIsLJeYaS+hEEVR6Gd8XKHpjpBiE8ZgkFlAZo6CMHu9fxOtlGlphcjC\ncokRBC81amJANKPKStoIRQQJk/TDsglJoOdeVeYIWe+3TBaWS4rFywhBFKWH9LNBiU8uRMe+uS0F\n2tsFFCCSZl0AEtb9zZ/JHAlZWC4pDjXDhBAzlGgtCCsrlEIItOQAPkbTmJ6gpvnSZzJHRH5ilhRH\n3WujYlPw35VvXoPHmf0x/JD0ox810TCQDZiZRRLmy5a5zlj5p/kehcWHNkgRXdPMyo8UKoqR8cEN\naWsms3iaplnpKqw4WVguKYYgQ4gahBihXNIoeFjBkcNSpnA0QA+kP8yTySweMx3FUqMigAE16+ql\nm4XlktLigB+mLQ2qQsMkSgGhQI1H5di/nRuBYVdj2IlHk7+lwynrzvUjc3QEgckDt8Z7hoYecaYa\nAmEdTZ3NwnJJcRRDW6IWqZKmhjWgMT/3SuhzRoSeCnA7hjYWMy2015NWkLl7hGqCJhDv61mucOun\nl5KF5ZJScMqZD6T2Nd57JAhqPCDISsW0DOCxsO+biLr0GVihwB6ZNYgaxtoBYwCxMSaLwHqLMZCF\n5ZJiKY8/l6oBay0hhBjSkpWbwWOxYKHZ9jmYzsxnUhc8X/7M4hjrEHOGI1gGeyUrP4h5rMhPy5JS\nwsiZ2HJj1CxFYggNUyPBrEiGR6MFqp7dP/oayE68gKR8z+unA5W5u5yyZSSFg3EMzg9fTwJkPf3W\nY0ABupXJiWGctTEPD4BUGHUro12qxZoem4fGwdxIRYMN4POlzxwBFz/wXgDT/rlRXK4vq3d+Ypac\nFj0dQYNQliVBwGp0HQrLpMqpBIIJ6EDcTBUIBuqmR1NDPbUfdn1l2t60Jr3mZHA1ABYTTIzZSX8O\n/lpZ+lcgaWkaBwNFZYVzNs2HcNbpJ2DvEogl2ywzdwcpqdv3Ai1pvKIG7NQIVdEjmOURUSqKN3GJ\nn8ELMQNfWWNcB1vW7PnW5bSqLl4aGtamj5xAemIrwGBqoTFTYH30a01LfDmF5N+aUhHPs8wnyI5F\nOZGKgMWbEEtpC+Mq8KvDPKKqs6Kin3MmlHUXS7y3kvfwuhKX68c6e6yQDiec9gDMbZ8kBMV5wMTZ\nhsuBAYra4k0Zu9zq6ZZdDIFWDQecsMF3GS/bdCf3Qus2LKczXDtwDcgaugUUzHR9Az1qynYBOkYI\nDgYf8IFgtavxgbbBRbuxAqZCAvh6CFN20VCymvQYg2e0A6aJL1iD6ev1K121Y8oaelLWCNph6LSL\nqW5X1HhcUxD6gX6WQWAGwDtDZQMuBDp1QxkCAQu+ZAM9uuIwYYqhEYv2vgHl8Yh01paghBgpSU3U\nLEXxTOGloC2BAiWF7Izo7MZebT1bqwVOfTSd0MNY8H4IXwRCL6xo/NO5WO3RNEPgYkqJwaZcPbVc\nftbY07IGCAUM35+9zSY2lrswdUljfczNswynM2qwAdR0Y6pSC103RK2BdrtHoUO0dJIbivvxsm/+\n3/bePN6Sqjr0/669d1Wdc+fbTXfTNBBAQJxRwSiJRo3+nEXzfA4xShyiMcmLijHqx6jPl8EheaKJ\n8885GkUNDvGXaHxi4nNCMYAyiICCgEA3TQ93OKeq9t7r98euc4emgaabvvdcur5adJ2quuesqlO1\nztprrb1WpPeTT3LOy57CFucZ1LhcKwQMViIqBtEOXXEI8Lm3PZDm1PfKMFqWAciASBdPehAr4H1f\ntHz30lk8e2tPsnKoKtbaNByPc4yZqSQPpKyOJgVtMJPnUKBVlncxXiPOjzEfj2EqbAOaoIvoQTFv\nYpMvWdRJVVQmx9mCbG4HBRA7M1y54cE8/98juvFodNZzC/NsYIR8jd3oTe35NARU1wxhYRqPWWMz\n3isCuWaoeCocDtgFXHj+ZdC9x8EZhtwJjDGEEHDOUc/uJjNbFq6/6CBPdy1d8QNn7ZgVawTrADp0\nJh6CE0swFQfTtokmMp/XRHKijhLEYGd3UJgO/Wycc0fuyyu/W9M56kSi7GRswxR//bl/wOCHbmh6\nx8SmU2W6bdWkYnNWA7LQvXJtLBaBGBFS2TxDsly29xxhSJqDGWOS0qzmsCFd68WJO+lH+lCitSzv\nQpRIJGBNxhH3fgzxRx+kshWZP3g3laDkTa1BbwPGzpH1YfvEFj5bT/Gp85Xd00cyVQtzkhFm59i9\n7QfsZpZ1TLCWbnghpnwoC5hITUmNw2mx3F95m39/a/b2NytxXCKC2pTmBeyahXxdqrS/yoZl6tsk\nQlVVFJnba33WNTYwOWDWzpOyJjCkGmgeiiOZnzuaGrAaMSr7PYPHEMDUeEaxCpnWWAJqArm34IWY\nW7yUeAvXbTiBv7tpE++9YgNz6+6JsRleS6YYweXC2BHr+MwFX0YJaZ64T4tv/rdgAA0ZBkPzf8CQ\nkdHBLtzFsmRZyu21HJK9LCtx3CDfVSWl41S
h5ms/gVALhph6J62ghRlMmqwQB57fHEovTLh57nOv\nXwND4yxIP1YWszAT7FChVZZ3IWk45VBy6Gxi1p5MTt44yS37e7mDWCIZ1uymNjmlKZjPIFDQyyJm\ndBrfn6XTzbjGPYinXXAs59Vb6Iz2yZnBxUhoWvKqM8zayEe/dQ4VHiUybyNYcNGkeb9DGvfZc5qm\n4G4zfeWOlNVQYAyRLg5PJhkf/dz5mGyMVenVFJMfUoEoFVJ58qzLXFXxP543DSy5lkPSBWClOfTO\n+CATI0Q6EEc5/H5PQrwjGZW3Fau9Y0QNEnM8XcTMYaRkpLIY6VHKJNSg41N8eusGnvezaez4GDPS\nx8cRbDQIihdDEAMhErSCzV12U9OPfUQEL0BM0c1AXJMJ62uNQNMLqbnY3YnDUFOsiiyihiA0wchA\nVgt1CMyXJdOrItHwcYfKUkQ6IvIDEblIRC4RkTc3248VkfNE5EoROVtE8mZ70by+stl/zME9heHC\nCvjoCWYSjnoQRh1qaPLp9g9DJA8RkR6eDMVgqHG+S6E72TW+kbdcNc3bZ+9P6G4kZoqrLdGMJwUJ\nRGla4daBAkW6yhvP+d+IUTqENOpeMCtbDj6xmfCYglYzAczIKCrKarS4MwvRPkUlEkqPEaU3P8Pk\niksznOzLk1ECj1bVBwAnA48XkYcCbwPOUtXjgR3Ai5rjXwTsaLaf1Rx36CARa6TpnjhBTU7QCGYQ\nrb3zRAze1OQa6RvLvB+BYpJKa27oHMf/vMTzz+ZkpDtFJ85gg1Kbgsx4oklR78EMotxYCh/QXo/v\nbbuEbexciCR7AcTgGPLh692CVGRFiaiB//o5BHForFiNAhWDugUpsGQwuaWTRe599AROqxWXZxi5\nQ2WpiUHHq6xZFHg08Plm+8eBpzXrpzevafb/tqxWZu2qMEgNARhnzk+gJkM17ncbBxWoJKdWw2gd\nKVxNbWpmx3+Nl/74cL6fPZjJXAlxnjmbkwXImcNFQyQDFYymQhuDSGsWLdV45MwP/AU72E3W5C8u\nMTBaDiI+BCIVBsfOAO/65A/x3mFFm8mEK0swAaEpuaYZlQXxs7zwd4671WyoQ5V9GnOJiBWRC4Gt\nwNeBq4CdqjqoDHEdsKVZ3wJcC9Ds3wWs38t7vkREzheR87dt23ZgZzFEpHp/GRIixPWsP/LhIB00\n2P0e4goeKz3ykFFoxa/MJK+67hiefu292D4+iY0lUYWREMl9oGcLSlPQd1BZg4uOPERqG5jPPD1n\nCDjGMvjF+Cx/9dWPUdKBJUYAACAASURBVIaKPKRhhC4UB245WBjTwZITKvj2RSW3dO6BkS5Os4Um\ndytJNB4XwEaB2KXMle03/ZxTj4YQV8ePOmzs07eiqkFVTwaOBB4CnHSgH6yqH1TVU1T1lA0bNhzo\n2w0JyaqsAEwJEex9nwkySuZGgEFNy7SomBT8kYghLLy21GS5xashqsVqRqDAFIEfjd2Pl//0CL4d\nTqKODmwBRgia0pacmFTVRlPKUh7S+3sLuReykAFClEjHAyPw79d9h1k7h6eHqyGgRPU01V6BxdUl\nm1pul4jSXEMi0CNQE4CAJ4QeuUJl4W8+fTHkOVbnqZNHecWlNdHRd4ZAjtOakQha72YE2mzshjv1\nE6aqO4FvAg8DpkQWKjEcCVzfrF8PHAXQ7J8Ett8l0g47C4qwBrqN4+8kfrVTCHXEKMtKhi36MJOi\ny2OJC45aupSVR21JYfoEqRDr+VJxGmdekLFr/BgkE7yxFLE3GDwRMc16UpYmpmXwSaIm1X9sLJcS\nBe+ZXj/OM9/7ckobcBlE79HBV3sIOVDuenSPtQxoKkW5jAhceA1k40cgGnHBNK6SVRAVUONB/MKP\n9wuf+Wj2P+Ht7se+RMM3iMhUs94FHgtcRlKaz2gOOwP4UrP+5eY1zf5zVQ8Rp0dTcssN0kEkAkdz\nxH1/h9wWCL5JzRj4L5dnfwfJ8NZjFDKBQjJ2SE50cMW6B/GOn44y29lE30VUlaBCsJ39FrdvFVcF\n5soZdk/Dxy/5V5LKtXg9SJU/DiGkSQpNZYkLjIIdlAnAMSvw6v/9LTpZpymsm4HUIOEO3/vgyBuw\n1HhjmO/t4hm/BUpADqF2t7fHvvxobAa+KSI/Bn4IfF1VvwK8BjhTRK4k+SQ/3Bz/YWB9s/1M4LV3\nvdhDjEBGBqYGDD11dO9xBrvnaR6CJYPZJXPaKpNTuho1NWp6mBpiGSCf4mw5lVecJ9xiR7ATXbq+\nj0UpXYeo+z9GspkjLwoqX9GrdvHub53N9rgT5yCX/c8LbYHBbG8lNolZSyr0SM2ch69e5Bk76gHU\n3uBCyoMV/EGrfXp7RDFNTq4nCvj+diZ10J6s9V8DyDAYfaeccoqef/75qy3GXUBjKUZHMDuxjDGH\no4OnuvAM4s1fRuIcYhRVyGwzPVIN1kXqOuKcQ6LQt57vZvflLRdbeiNHgMuYtWOgDktJN86RBZjL\nCmQ/29p6m1wDYxX0nVIqHN7fwOd+/yzsnKc7mjLsfPBktnVc3WkieOOJQIZBQgU2JWpdsqPgzPfc\niM9GEB3BhkA/r+n4CJr82StJjUn+SQ1UUvGSJ03z7PtE1Bok1mCyFZVnJRGRH6nqKXd0XOuOOBgI\nKF2gJgeCOronv5gqrsfZDoIlc8uVT9mPFHaUONPF21Gu6B7Ly66aZvsR98dLjaWkiDUZc1iNSMzo\nZ5EiHFiriiwY6tQQmrI3z01s5yOXfZF6JA0FQwwMww/qmmTQBhkH0YMNeCy7KXjJ675MpYfhLPTL\n2UY5+iUBwJXFmJQh5MVSbf8lj7k/EEtCDXo3VpR3hlZZ3qWYlDokHihQIMOTCaAPpK/rcLaLtRkh\nLPdLFVnGzpjR2zzFV3ZP87IfriMvtsDcVqQ7zqydwMoMRazJmaOfRYKO0z8Ag8/GlCbUc46IIxsp\nGBm3fOw/zuYGqYgascaSuT0ellZ37gMRxCOkiVHShJQ9jgrYdNJjKDKh7kXGstQrKOXFpqNXGiG1\nbq6jZ8zOMxkhWMVIaD2WDa2yPAgorrndM7TqIwo1U2w+7TnM9j0Gu6T/sqYiBmqoJkd45/WOv7z5\nKPqTJzJeBbqxwPkKoxETHZURemYUFxUxM9TS3W858wilA9TgDeTWUalncuMEz/6HP+JmmaNPueQv\n4qFXl+uAWWz6FXxGL8KZb/4Bt8xbJPYY6U6SOYCY7gNSA7qVxoceVVWRdzq89PceRqaBaBzGSask\nGtrrcBczqHQzsCYkH0tBHwHGf4+5MEksa/omJxsxeI0gOdeMnMTrfzLK2dUDmR89mtz16XdL+q4A\n38FopJaUsOyiTxZILChCefsC3Q61GLIQcVrhYgQvqBfmfJ9so+Hx73suO5lrdGMktR6MVG3C5R2j\nBq0cBksV+6iB2uV8/tzdXFkdSS6BIIaq7lGGDoNWYKuhKAGi9jEIWhoedT9IsfyckJwILbTKckXx\nbG
bzE/4JxmDU1NSlZT5mfLF7L556w7F8Z+TBTEfHVLgBn+1EVLBaUWW9FZFvUENRVVPKyBQ87eMv\nYxdzEA2VZvQx5OLXQP2z1UUlUucl0Rty7dADXvjOCzn7/JrRkcNWW7xbUcgksWuIOy6kA039Sg9N\nIn1LqyxXlDmAcDL1TEUZu/Q4hr/bcU/e/MsxfC+wMVa4aicxz5k3U7h6ColdvKyCg10NUYRZs5vn\nfPKVbDU7yYGiCby3xuUdE2OJtZFg4O//veT6HdNUZUZcjdygO0BrT3/ectrJFqekfM+QAn/apg4B\nrbJcUSYBn1muKn+da7tH8vyLDuM/5Xhmu9OMZpZKaqqsSwzjjJRdumGejFnMCrvYk4VpcTJKp5tz\nS3YTz/7Qn7Jbeogq4Jgv+wAomqoqtSwQQkCAjilAIl+8HL51/g6yYgM2L3A6HLbaIMtBVclsTq49\n/vi5D2gS5JMf3bDY8+hQp70KK4lGLMqxT/s4LzsvsvXoo6k1Mj3XozI5M3lGz2aAQaSkLHYSbI2J\nK29ZKqmyeuENwZbU60se/54XsNXuJuDpFJ1l84+Gz1ZaeQbKJxXZMkDBDhx//+HLqZigthEj8+TD\noSsREWLTv8mSIbPnMwGIialalWm9LUtpleUKUolhvhyhZhqn96QuoepYJlxGEaAIAUtJlIp+5ikd\neDpoHF95YSUSTQ2aAknb5nYQ1kce+fbf5VfcQulTapEgmKUJ1Iew1hQRQggYY+jXcBPwu391MaPr\njsKLTdNdiRzE/nV3GmMM1lp8mONvznxM42ZJFQbUpOyHdi5XYoi+trs/OSWjRY/ReiNfeOWHKH+1\nlRG/gX50eDG4YMkD2Ogo6i4TvYIiKGpWJsCzFMHjtEfPOTRO0LXj1KZiwz3GOP3Tf8Iu12de+tSh\nR6+f5BvoyQV9uUfVojXJHlWXlm9PPYwW9imINfSBc34wy7P+5kYyt4k59YikyvbEMXrZgU0kOBCi\nRCTaND9dFcFT1zUdruKBW8CZsmk37JeEdlZP3mGiVZYriWZ4umQZTDDBnz/1TKr+bkoiRipqY6ga\nP1cwNXNZRmUEp6tws6oBzXDRI9LDGcGqY94HrIk8+j3P4hu3fJtga7q5Jfg0nFNCU0Y4poZE2jTB\nYlGhrCk0nUsgUhOXRYar0CdQUuMJNVDDDMIb/vEXvO9rYGWCOR0lk1SaT00ACeRhdZJxgoC3NUYd\nNlqieHIXQPu8789OSVFwiqYpnCMna8bgbfIQtMpyZWmGq6kpmPC4ezyS+qpd9KoaFyNOY/q3WQBU\nmkZSQ8bYVM7/Oue9vPSf3sRO08e6kObLicU2JeC8hcpAqhwfF6surSVkcfphWkt1KlU82DGELrYX\ncBGqHJ7z5v/kRz9fT/Czt/u2q4HgyX1G7Xp4A86W9GuH2XUhE10W/Jft9Na90yrLFcQDQgQ8AcMk\nI3zqDe9n3IwgC/ZXqkqZeo0vKsyhoxDyScsvRnbyiL99Ltczg5c6GSJNNDWkTt+p0IdGFKVaY2ko\npaTvzXmDC+n7SwV8FUuaMjrXL9hRwB+968dU7jS660cxo/kqS35rTFPoKkok2JJ+Twllxcfe+lhG\nzWLAZ6A0W5YzhE/h3ZcIWJpWoxjyaDiOzazfneFF8AZqq9TNXT2ofRiH8Guq60DQQFXtonvCFE/8\nyB9yM7PMshOMRxRySH29xYGYZk7I2sIScQMFLwaPAxxOLaavzJhtXD99PQ9/9Se4on8czpb0dt+C\nxOEbutroCIamkn4kK9Zjdl/KdGjuSxYDPi23pi3RtoIE0k0ZCNiY0a8jncIww24e/6mXEScs86FP\nrzfHhMvJgxCaft/7W4btYBElR/B0vQcJ7CYy1Z1m5lc7eOVT/4THHfkbrCPDhQyMwUvj/tKIHUZL\n+bao+pA7eiiZF5w4glbMOjj7p//GWV/4O6ZPXI+1G2DbSXS2vwQNOUpYNnVxGHr22drR7wZs3UPI\nCLXny2+aZnIg5uqLuCq0JdqGEAtNS4cMNVAUhuArXMzY7CfozhtizzM5PrFkUD6cMZEonsKn2Sl9\na7AxUNEnHtXlXd/5OI9927O4hGsJtgdSI0R6eKohUBp3Bs1zKiDHolR4O8tNbid/9q9/xQcu+RST\nJ27Al4a5+VvoT34HP/VRfPYr6lwXpo8Og6IECJnH1R6ki2SecPPPGAN63i929Wy5TVpluaIsWoc9\nIGgf6wzWdDnrjL9i5pqbWOfGmd81x+BnXoVV68lye+SxpswCPad4A/OAGIvd2aMstzNywihnfOjV\nvPUHn2EbFUpgrPZ0q7VV8EvUkMdUIWqr6/Pm73yUJ773hVyw4wJs9HgV5tRj8wLFMDv9XcoNnyVm\nl6226Lci2B5FZRCrzFYVb3jFr2OpqU25xjzJq0M7DF8lBkNySMEQUfjqzHn89Rf/lh2uRqyjCB6T\nOWoPzgyXxjTEJVH61JXSRLAK/Sydl1NL5h2zv9rJsx/xJF50yn9jioKOrsNLYzXHpiKTKLEJfzX1\n4xeSoQd3aMTTTMRL/W1uldhpFoeSEhd2LSoCw6Ar9/KcyRrXzL+PCq7ppYRCz8wyR8XffuP/5auX\nfQ+zcYRgK7KY6k8uvQapbYSH2MWWR5PNPhI795tQdzEy05xZRI1gNSZZlnytdiGnc//9nVFSpXWj\ndrFriQQgIi4S5hVvS+obLuCr73kcEzK4Fw/ZUfg+D8OHzwt9iJAUQZNKRLIgT5s4iXWyHteZZ3eY\nw0hOCDW5McQhu5XjQDk0DNaDgKsthogjUFmPO2qUr137Q875j3/l3huP4+3Pfyvrsdh+wBYjVCGQ\nuSwVcIgkRTfo+TY4cwWLW2gAFprPUtK1G0zLGwyVpInEw0D51s1fxfQHg+sp4MUkFaqKFcFTMy+R\neQl86Luf4Avn/gtj91jP+JGj9OoeNoBr3mbxGgwqjBiQktC5CnXbcdm1FLuejfospR3FLhJL0C7g\nmnMtU+GKZrZUami3v0bM4OcmLg6tJW0PpWAnCuz27Xzy3Y9jVAZBx0NXUd4ZWstySAiUBG+Yq2Z5\n4zfeyU9mL2G3SX3ANfTXVA8UiclyEiJ9p8zFGpd1cNEwno0wOz+Diznhhjle9bQX84QTTmOCkaZ+\nYrZgVVoGlmFcsMSXx2kbjaommbWALvtZWYxcDKy1xbnsNYKgKJGKSKQk8N2tF/Gaj72VfPM4IYtU\nFoo8x1Q9Qq9HnjtUknV6u7F9CWB6GD+Bm38wduYRmP59kOgQrTGioAWKI4oHKTHqQAvAH6CyHLTT\nHSjOVBSjYzvs6t/IP/yPI3jwdMRHAybg0DRr5xDVmPtqWbbKckiIVCg5NsKMmeMRb//vmGPyVEDY\ns6Ym6NY2Re9NDAhKHQPWWpxYJCoakxWleYcSS2/7HPeRDbzw8c/ikUc/HEiDbYfFYSigsTgHSmT5\nU62YZgCfXi2bMdTYTk4dQSAQ8UTqppLTdnbw4a99gnN/+iN
uGQm46Q65VhR1jQ2BeWuwzqExYK0Q\nNFm0QYRc7UBH7wVDVIsxFme2IXP3I9v9FEz/flCPY80tSYFrBrik4pvh8oH04REVVJQoTdCmeS9R\nS8d41nM9733NiUxoClsFAUsNh3CH8FZZrjEWfZipZvY17OD0Tz8fqkC3M01g/yuirzQDZWk1Yve4\nv0SFYMAbRUUQH3EmI4jBR8XMBfpbZ7jvlnvw8t95MceMHMkYo6QyFEJB3jzWZuHxFjUEWT6tUomU\n1LjGPo306BE5f9tP+MZ/fZf/uupibvZz2HVdym6kq5CFQIwenwlGUxJ3FlP3zaqpSznwLxs1C5MG\n9o4hikFkDgkTWNPD1uuRnU/F9U9Byo0IfZAK1CHkRAkgVbIw90dxqSDqUIlECQSNiAjOWDLj2HHd\nD/i3d5xGwU4KO4aLDkxs3B2HpqKE1me5Jkk3rSfgOJwJRrYJcfMEeF1TP/odv6hEQhOYGvgWk5Vj\ncCFiCYhADCVBI04indFIcYzjF/ySP/nC/0Q0R3uB0PdMxTHucfjRHLFxExsn13HYxDomOiNMdEag\nDvSrkltmd7NtdhdX3HgNV9/4K3bM7mRGe5QTNd1ugXWCcYbsCIvtlXQ7QjEfqA3UDqw1FF4WppmG\nJhshSPoREzXkAfIAteW2p6KKRwZDbYEQO2i2FbPho+jun5NvfwlRBWKOFQFC8jRqwX4ni4mS5hsZ\nBk4Mh0DoYbC85czTyHuzjI5N0CfiBJKtnS0E1Vpum1ZZDgmD2ceKaSpVG77wpx/nCe99AXa9NM3u\n1wYpUp6S6QdBkIELbqBsVJIirTQi1mLJsKqUsSaztrHoanABpgxRDXNxnkv4GZfvuJq4Q+nXFdZa\nxDi8NHmNCiZomi46pYRRT2EsNnMQYuqHZB11r4fD4uuI7xRkMWKaqX7BJqsyD4baepQmUq3NdFVJ\nivJ2p6FqskBVDCoBxBNND8IYOv4f1H4jrrwX9I9D6xGM6aPGY+Io+++zjE0WgAOEIsvw5QwZFVu3\n3cCpRz+IUTEQBWOSW8IGu6ZcPKtJOwwfEpZPokiWWUlkhnke87rfoX9ql3X9CXZR08kiLgaswrzL\nyMLwZcktTXKWJYZx3Mu+RcytkqOXzVy6rSIcehtKS+Ie+5cflxR5bGLHt5Z7kA6V/jKtDFJ9Blbm\nHSKxKXkWl8iTpyCP34CdfTRu9hGYegLROYQ8DaO1m34gJaVLYZIbxoTObSrSKJGoiqgFCagPjGTC\nzTddyjlnPYyjiEv8uyYFw9Qc8iZlO4PnboAFRunyL2/5DMW1NT0tWZcXaZ9KKt8Wh7PW4MCaHDzX\nS6uq77lvkTRneemyDDV7X26LW+2Py5bB+y/9lKWyLQ3epPImi5+5T4pyIMPStCI1gEdFCG43cfIr\nhOlPEovrQaYR49LnSG8xpcjMNab4GPF2IvBGDSYGbJNrWWQ5ZT3Pu175MKbTEY1eHKSstYryztAq\nyyFGgAxhA+P8y8v/kU7mCDrLSB3p2RQEsG3vvTWJ2nnUVESpiKPfxa/7ADHbjSdNj7RkSFREPSZ2\nIY43OZkzt/meopCJA60xKuzql5wwcR2nHA1jqz+AXPO0ynIYUYPGCEEhKhkZm5hm5BYl9AN9p83w\nLN6+ZdUytCg2TWU1FZ5I7F5Lvf4DhO5lEEfTQVIifgq0wMgcmNkmveg23lPAqwF1FAZGZi/mTS8+\nNbkk27JrB0z7pA0pIi7lJhqbIpVR+Owfvo9qe0ozEq0IGoih9c6vSeI4qEtTHq3izTy+uIp6+tPE\nse8RbIlKB9wsaJUKDkeD+snbfMsgwlxQ8rwLYZ4v/d3DWd/o3TaIc+C0ynIYWZiN1yQUY8iMMMk6\nvv2Ks3nYxAPYUGxk1HZR2yY0rFnMHEiZ0ou0S8xuInR+Tn/je6k3vJfQvYgoBjVVmtkTx1nuYV2O\n1UAn61OXN/H+1x3GODV5c7xvH/UDpr2CQ8LAz663sQQEvKcbHG99/GtYN7OOcEtNbedXSeKWA0Gk\nh8Q0eyfNCzeoFum7NhXV+P8lrPtnYnYdYnxKizI14nbd9nsSGWeeVz1vC1vs8vnyrWF54LTKcojY\nMzC5NHYrCM4YJCqjdHj/c/+ajZ1NTJjOgh/LNP5LbVJUTOvPHFqEEjSHMJGKZ5geA6tRMajfgs+v\nxh/2QUJxNRGLkT6GQCQNuQc2YyQFd6zW/PYDNvKoX0vz6ZWs+Sw/dMWj1yLt0zRkyJLFLlsi3hjI\ncowKRQi85/lvprq0jysUDYEYDV5JFboXkpPbr3goiSNJiclc+jdmSJOSlLqKRAI55ejl9NZ/knry\nq0Q7h+g4tYDXDCVAHqmDJ48lj7qP4ZVPLKDfJ1+WJpTaerQcGO0VXIsodEzORsb47F98gu2XzjIS\nC+yIYqgpfIZiqFx/wcpsWUNICdn1YGeg3kQsriVO/yNh6gv4/DpGA0zWNVmM9OcFqTNOOHKeV5w+\nTVVVdDqdtunYQaBVlmuGPW7+4HBzytFMcN7rvsTTT3o8tnLUosxnqYhF16dpfy1rCxMLCOsgFmi2\nHcVQS0458Z/0Dn8j5egP6WcBEaHrhCfdcxvv+P0tyYZ0KeBnTPto39W0V3RNEJvit43KbDz2ZmQE\nUcN0HOWM+z6FVz/yJVRzhiLW1HVFFcxQdoZs2QfUpQLBsSASiLZPpeANzB3xCWbHv0BP+jzi8Ble\n86yjUXajlK2SPIjs85UVESsiF4jIV5rXx4rIeSJypYicLSJ5s71oXl/Z7D/m4Ih+6DFw6QfANxVx\nUpl1z3RnPQ/feCqf+f23U2gHh6GnMpw9x1tul2jKZtJBQMwsSE0MOdZ0iMZS1TejG77OU079Hq9/\n3hZ6Hmo/TqHFaot+t+bOPEkvB5Z2YXobcJaqHg/sAF7UbH8RsKPZflZzXMsBYRaUnm1mKS+mGkV8\nTKVspxjhXtURPPnIR2KzSSa6nXYYvlaREmlyMA0Wa0siPWxQqt27eNVvPp1XPOZ0rAVXeDpOU3W2\nloPGPilLETkSeBLwoea1AI8GPt8c8nHgac366c1rmv2/LcPSC3SNotDU9Y7QFJhILRaS0gyZwWmV\nyoiZEV7yGy/g9U/6A268/ueENsCz9tAug8xICSOYOEnwBmcj0e/kIy94P/9ty+lk1JRakZGKLatr\nJ4AfTPbVsnwn8OcsRhnWAztVdfBbdh2wpVnfAlwL0Ozf1Ry/DBF5iYicLyLnb9u2bT/FPzRIBTWa\nFJCFxSwsORlGUg+VPDdMUPDosYfwvT/9CvNX70pKNHo8nqIoiHXEkqVIuQQsNZmGlMNnwBuT0pRa\nDhqlM9TWEA0YAnlM19/bQBRDCI48CKKz1K6HywrYFvnvxz+RUzsn4jAIltzkTfUgk4oNtxw07nCu\nnIg8Gdiqqj8SkUfeVR+sqh8EPgipnuVd9b53V+RWK0v3mWXHxRAZsR1G6HDBn/8LN1Dx9I+/FDo1\nc/1dZIWjCj3EOF
RTDclBf/I8RvIQEYVeO5PyoDFZ1tRGUjsNyfBL+v7WfjdjWUaNMucN63WC0a3K\nh/7o71lPgVvSt3Lxdmh/3A42+3KFfwN4qohcDXyGNPx+FzAlIoPH6Ujg+mb9euAogGb/JLD9LpS5\n5Q6wNj15IQSEEY5iig89/3/hr50ntyPMh4p8LCcbtHTFEMSlToOkIrLBtKXfDibRpGroqWJ8uv6D\nKvKTuWEmziNFh7zX5YwHnM45Lz2LLawDxlZb9EOWO1SWqvo6VT1SVY8Bng2cq6rPBb4JPKM57Azg\nS836l5vXNPvP1WEox36IUZYl1lpyTT7PB8lxfPvPPsvm3dOM+Qn8vAepEakRfNNcLBLFMJ/BfNa6\nmQ8mc5mhtOka2xix6hGpsJT06siom2D31pKvvfC9PP+o36JjxwkYsrp9lFaLA7HdXwOcKSJXknyS\nH262fxhY32w/E3jtgYnYsj8UxSCNpMappySjwwRnv+jdvOG3Xkrv6lk8ykw9i+s6qnqWLHpcBNQR\n2pSjg4zFxWTZO/UYG4iZ4p1iq1F2XjHPl178bjYyiumMkwo9R2xbEWPVaHvw3N2JPTCGPgUGyICa\neXZS8aT3PB8/JpiRjNHRDtXMDFmw1MbhDW3xhYOIUUMeIxCoLZhuQShhNORsNkfy7me+ni4QySnI\nUpAO3/isW2fyXUnbCrcFgNJ0sUBHU5IzYpBg2GCn+PYfn8MtzPOEv3wO1b0y+g5qG5ruaWb/Ggy2\n7BPBQM9Egk2dMOOOiled9gc884TH0EEwkYUotxfoCxS4VB+lNfpXhfay381xNBl7YkAywJLZHNH0\n8B3OCN96w9m85bGvonfxHFWdAYrxNVEMEUWMNj5NAEMUQ641mdZYwkLnw9T29dC5pYwarJrFh6hJ\nxTJSoxLx0sWoI48BtKQMJSbr4GtL7WeRToYJo/R/MsM/PO0veN4Jj2HUg8UhslgpyJJGBAJ7zYZo\nWRkOnTv7EMXCklJdSxaBGqXG0aHDaZMP4PtvOoeNv1Ty+QxTjOLrHjHUqIHSKaWLqERchHlnKa1d\nSH8ZtJU9lIbu3kZKG6kMREOqao4QsAiQa48onr7NKMmpKk+Rg693EWMH/fk8Lzz+cZz3pnM4bfI+\ngKU/ePMltfoG5foWtresCu0w/BAmC0KwUOJwwEi/5JzXfJDr6fH7Z70Sd3SX/u5ZTFBsxxFEkWbm\nkJcckYiN4HTQV3vQWvbQ+A02uuRMNVnVoVmsRgrtgThEHYUYsmKU2ZkZiokOj9v0GF7+e7/LpApZ\nyPE2o4zKqMmapuSreGIte6VVloc40q/odnJqQDpjTDDCBBN865Uf4wqu5QVv/TM4YRoNFUXwGCKV\nVTq+maO+oChTYY9DRVFCGobLgsKMRI2IGIykXNXaOIoAQUrqTgH9DHNdyTte9npOG3kggk0+SZve\noRtD6sKYHTrXcC3RKstDmL71OGtwIZJHAw68GCyGTsy4n5zED878Z27J+7zv+5/lI9/6PFPHrMdR\nE6VPr+4vFJrVaDExAwxppuTdfzjeN5YogsQA6iF6rA84tYyMjuGzEXbumsH9ss8HX/1m7p8dRx4c\nznRAU+WoCOQKhGSizxMpAHsI/eisFdrUoUMY35SpcbDQf1zFUNM0SVNwAlR9XB6ZpeIbN/6QN3/s\nXcgRjs7kCLvLOYrRDlW/ohMsmdpUf3GVzmklMVEIAsEFghXKusR4ZbyYoNrdw2+f41VP/QOee+Lp\niM9S7mrjfBz4IdO1B4gEiXhoWkK0ynKlaFOHWu4QF1PxjSCpyVkKBkVyGmXZRGMlyzHABBlPPvw0\nnvja0/j0VefyobS/4wAAC79JREFUsS9+kvGxDIxDRBEJhOjRQ6TIlFPFKVQRnFimOhuY+fkODuuM\n844/fCdHMUKBw2mBd+mapvBawGFTapbEpHBJUfAidadrfZZDSGtZHsoMvnoZxBSW2IOaEqZRA9GC\nM+lwVUSUQM0cnl2UvPPc9/Pdy84nrrNEo8tsoriXEnF6K6tpaU7nkuMXulQO9qSo+97YMyd06XGy\nNzt3SefLwbF7zSu9jRJ3Rg1qk+zaV3RXzSlH3Y+/fOqrmaCgIFs4TyUQfEXmsvS5asAM3jdd10hj\ndLbBnRVnXy3LVlm27D+NZVQS2EWPr93wbd5z9ofxecXI1AheAtqxlHWFxgBRk+JzecrfHCS/izTB\njsWIelJwzfEyiLLbBf2+p9IUXd57fdk+kjU3UJBh4ZjGcpY0NlbVhdQnay2qyuzcLkbGRql9wOYZ\nsQyMuBytPL2bezzwuPvzyqf+CZuYYIoRiqb9bKvz1g6tsmw5qATqlE0YXdJvlYeuwQdP7Wp6VNTA\nKz/yRi7bfjVxXYGMW0xhkaoihBpnDRp8MzRVjDGoNooLIeJA3cKwXqgXIu9L7T0V9tLFcvG+jliC\nFBhN76oaMPiFY6ILiG8qMAXI8i4exSN4UVzMKK/dySY7yXMf+3SecJ9HMUZGx1sK14XgoI7gkk9S\nxTfytl6utUDrs2w5qNiluskAXYcX8C5HyRlnFEfkH1/4dioqenjO+9UFfPbc/4/v77yCfCTHFDli\nM9QoKpE61mCrxffVQEqdb5SlAprqbi5F1IDmLJ1jsVR5WoWc3mDPggUrRFCD9HPUGTRzlBqpe+Bn\nK3LJeNxR9+MZj3oy9xk9ji4ZFgvkVCqoE3bHSGGTa6GwySKW1qy8W9Jali37RYp3L/rbDIv6wbDo\n/1OBSESb/4KnZJ4K+OGNl/CV73+Di66/kpt1Du0IjIGzFqeKjSEFnYJP7TOMIwzG30bSsFmaf28v\n/h4ChVhqIsY6ohE8Qh0iVRlxM4bcC8dObOJR93soz37IkxvFaMiafw3SRKjN8nG+7G3Yv+iPbBl+\nWsuy5aCyNEgz6AeEpuKZarLUeXKBwTRLMGSMkVFr4FGHP5RHPu1UApEKT0VkJ3NcdPlPuOhnl/Dt\ny89n3lZULlVB8t15TAbiLEYMYoQYIwhkkqOqKeczRLz3aIyEEMjIkSrDzsMRExu45+bjuO+xJ3Hq\nvU9mkzsMQTFNo+EMRxcHlQIZ5KkP2DJV3JybjR5iUtgpgm1SIWXMsh+PlrsHrbJs2S/22hurURqB\niGCa3j8spMKExgoLHnKX6m2qBkSEUaDyNevsJMfdczOn3/Ox1E/xVNRIY8GGZepn6bphoM6UNFa3\nzX4lNkPniMVhMSiBnLyxGEFihBCS/CalU2me3jGl9DRuh8HwWlJCuTemsSKXFNNYNoeptSzvTrTK\nsmW/8BKbKDMLw9OBbnSNglSB2qZpkRYWYtnBCkE9RgyqgohBA+QmBwkoiiAU5EC+xyfvbbhtCAtr\niWVWXZQFRT5I0kkqNQCCN4JKslYDacjvMBhVbOP79EYITc8bi2l+LAzLFWJshuVtHbW7I62ybNkv\n3F6UwUJ1HLP4Or+VQgG7NAdycKwd/IW7g+Hr3pXQ7RYQN7d+sTTv20FSpqT
E8EXhFt950FNz+b69\nf1A7++buSfuttrS0tOwDrbJsaWlp2QdaZdnS0tKyD7TKsqWlpWUfaJVlS0tLyz7QKsuWlpaWfWAo\nUof6/T4XX3wxWZYtbPPeIyJYa8nznLIssdbS7/dxLomdZRl1XaOqFEVBv98nz3O893jvcc5h9+hK\nH0JKgj7ppJNW9BxbWlrWNkNhWaoqWZZRliVZlrFx40byPEdECCEwMzPDunXrKIpi4W9ibGodGoMx\nhqpKBRgGCnagQOu6Xjg2xoi1Nk1Pa2lpabkTDIVlKSJ47zn++OMJIXDTTTfR7Xapqoq6rrn3ve/N\npZdeSqfTQUSYnp5m06ZNiAg/+9nPOPHEExERLrnkEo444gicc+zYsYMsy5ibm+P4449HVfnlL3/Z\nlAFb/eIhLS0ta4uhUJYARVHQ7XYREa655hqqqiKEsDCMttZy3HHHcfPNNzM1NcVVV13F5OQk69at\n4/LLL6euazZt2sTIyAhXXHEF69evByDPc4qiIITA6Ogo/X7/9sRoaWlp2StDMQwHqKqKa665hvn5\neaanp9m8eTPWWuq6ZmZmhi1btlDXNSEEduzYwfT0NJOTk+zatYtjjjmGk046ia1bt6KqnHDCCczP\nzwPJR+m9R1WpqoqyLKnrepXPtqWlZa0xNJblYMh87bXXYoxh+/btC21Wr7766oXgjzGGXq9Hp9Ph\nxhtvxFrLtm3b8N7T6XS46qqrqOuaDRs2sHXrVmKMXHrppQsWqjGm9Vm2tLTcaYbGsqzreiH4Utf1\ngm+xKIpUo1CVsiwXgkExRpxzGGPYtWsXMzMziAjGGNavX8/27dsREYqioNPpkOf5gqJsfZYtLS13\nlqGolC4iM8Dlqy3HfnAYcPNqC3EnaWVeOdai3GtRZjgwuX9NVTfc0UHDMgy/fF/Kug8bInL+WpO7\nlXnlWItyr0WZYWXkHppheEtLS8sw0yrLlpaWln1gWJTlB1dbgP1kLcrdyrxyrEW516LMsAJyD0WA\np6WlpWXYGRbLsqWlpWWoaZVlS0tLyz6w6spSRB4vIpeLyJUi8trVlmeAiHxERLaKyMVLtq0Tka+L\nyBXNv9PNdhGRv2/O4cci8qBVkvkoEfmmiFwqIpeIyMvXiNwdEfmBiFzUyP3mZvuxInJeI9/ZIpI3\n24vm9ZXN/mNWQ+5GFisiF4jIV9aQzFeLyE9E5EIROb/ZNuz3yJSIfF5Efioil4nIw1ZcZlVdtYXU\nZ/Qq4DhSg+iLgHuvpkxLZHsE8CDg4iXb3g68tll/LfC2Zv2JwL+RGqQ+FDhvlWTeDDyoWR8Hfgbc\new3ILcBYs54B5zXyfBZ4drP9/cDLmvU/At7frD8bOHsV75MzgX8CvtK8XgsyXw0ctse2Yb9HPg68\nuFnPgamVlnlVvqwlF+BhwNeWvH4d8LrVlGkP+Y7ZQ1leDmxu1jeTkukBPgA8Z2/HrbL8XwIeu5bk\nBkaA/wJ+nTQjw+15rwBfAx7WrLvmOFkFWY8EvgE8GvhK83AOtczN5+9NWQ7tPQJMAr/Y83qttMyr\nPQzfAly75PV1zbZhZZOq3tCs3whsataH7jyaYd4DSVba0MvdDGcvBLYCXyeNOHaqqt+LbAtyN/t3\nAetXVmIA3gn8ORCb1+sZfpkBFPh3EfmRiLyk2TbM98ixwDbgo43L40MiMsoKy7zaynLNouknayjz\nrkRkDPhn4BWqunvpvmGVW1WDqp5MstYeAgx13w8ReTKwVVV/tNqy7Ae/qaoPAp4A/LGIPGLpziG8\nRxzJJfY+VX0gMEcadi+wEjKvtrK8Hjhqyesjm23Dyk0ishmg+Xdrs31ozkNEMpKi/JSqntNsHnq5\nB6jqTuCbpCHslIgM6hcslW1B7mb/JLB9hUX9DeCpInI18BnSUPxdDLfMAKjq9c2/W4EvkH6chvke\nuQ64TlXPa15/nqQ8V1Tm1VaWPwROaCKIOcnx/eVVlun2+DJwRrN+BsknONj+/CYK91Bg15LhwYoh\nIgJ8GLhMVd+xZNewy71BRKaa9S7Jz3oZSWk+ozlsT7kH5/MM4NzGslgxVPV1qnqkqh5Dum/PVdXn\nMsQyA4jIqIiMD9aB/we4mCG+R1T1RuBaEblns+m3gUtXXObVcDDv4aR9IilqexXw+tWWZ4lcnwZu\nAGrSL9uLSD6mbwBXAP8HWNccK8B7mnP4CXDKKsn8m6ShyI+BC5vliWtA7vsDFzRyXwy8sdl+HPAD\n4Ergc0DRbO80r69s9h+3yvfKI1mMhg+1zI18FzXLJYNnbg3cIycD5zf3yBeB6ZWWuZ3u2NLS0rIP\nrPYwvKWlpWVN0CrLlpaWln2gVZYtLS0t+0CrLFtaWlr2gVZZtrS0tOwDrbJsaWlp2QdaZdnS0tKy\nD/z/3B3R/7eWEaMAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "clXQrFVa2nN7", - "colab_type": "text" - }, - "source": [ - "### X-Ray Image" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "tbaIkUCS2eNv", - "colab_type": "code", - "outputId": "e6b11596-71f5-4a45-aea2-657c4ecdfef7", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 253 - } - }, - "source": [ - "xray_img_raw = tf.io.read_file(\"xray.png\")\n", - "xray_img = tf.io.decode_image(xray_img_raw)\n", - "\n", - "plt.title(\"X-Ray image with shape {}\".format(xray_img.shape))\n", - "_ = plt.imshow(xray_img)" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAADsCAYAAAB66G16AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAH6ZJREFUeJzt3Xv0JGV95/H3R0BUQLnpiMNw09GI\nJzqYCWIkK9GNAqtBE5fAqowGd7xxFvZosmA2BjfqahIvIAYDSsT1AniFNUQWR7xEFJgRRC4io0KY\ncS4iMIAKBvjuH/U0U7+evndX1+3zOud3ft1VXV1PPV31qaeeqq5WRGBmZs31iLILYGZmxXLQm5k1\nnIPezKzhHPRmZg3noDczazgHvZlZwznoW0rS2yR9tOxyzIKkfSTdK2m7Aa8JSU+Zcj77pffZfpr3\nmRVJ35Z0UNnlqCpJV0p6RtnlqAIHfUEk7SzpFkmvzA3bRdK/SXpFn2kOk/RQCq17JN0k6bVFlC8i\n3h0RryvivectIv4tInaOiAcBJH1dUiOWrR9JLwXuiYir0/MVktZIulvSOkl/m98hpXUq//egpA+l\ncZ0dWH78X01QpuPS+7wuN2xHSR+RtEnSHZL+r6TFufH7SbpY0p2SNko6Y9QdqaRj0jayRdJmSedK\nemzuJX8P/K9xl6OJHPQFiYh7gdcDH5T0+DT4b4HVEfG5AZP+LCJ2Bh4L/HfgbElPK7a0VkNvAP5P\n7vljgJOAPYHnAC8E3toZmXaEO6d164nAr4HPdr3nrrnX/c04hZG0G/A24PquUScCzwWeCTwJuBP4\nUG78PwCbgb2AZcDzgTeNONtvA8+LiMcBBwDbA+/Mjb8I+ANJTxxnWZrIQV+giLgE+GfgdEmHAUcz\n4kocmYuBO8g2EgAknSbpttRyWyPp99PwJ0r6laQ9cq99tqSfS9qh+/0lnSrpk+lxp0X32vTed0p6\ng6TflXStpLsknZGb9smSvibpF5Jul/QpSbt2zffqdFTyWUnnS3pnbvxLJF2T3vdySc+kB0nvyLU6\nd5D0S0l/l54/WtJ9knbPd6lIehfw+8AZqWV6Ru4t/6Okm9N8PyxJfeZ7sKTVqY43SXp/10temY7M\nbpf0l13TfSe9/4bUOn1kbnxI+m+SfpKm/TtJj8iN/zNJN6b6v0TSvn3K90jgBcA3OsMi4syI+FZE\n/CYi1gOfAp7Xa3rgT8jC9Vt9xk/ifwOnA7d3Dd8fuCQiNkXEfcD5wDO6xl8QEfdFxEbgK13j+4qI\n2yIiP78Hgafkxt8HrAFePO7CNE5E+K/AP2A3YAPZBvDaIa89DFiXHj8C+CPgIeCg3GteBexB1np5\nC7AReFQadzHwxtxrPwB8qM+8TgU+mR7vBwTwEeBRwIuA+4AvAU8AFpMFw/PT658C/CGwI/B44JvA\nB9O4RwK3krXkdgD+GPgN8M40/qD0Xs8BtgNWALcAO/Yo4wuAH6THvwf8GLgiN+77XeXfPj3/OvC6\nrvcK4MvArsA+wM+Bw/vUzXeAV6fHOwOHdM3nbODRwLOA+4Gnp/G/AxySPpv9gBuBk7rKcBmweyrD\njzrlBI4C1gJPT9P/T+DyPuV7BvDLIevSl4D39Bn3NeDU3PPOcq0H1gH/BOw5xjp+MLCabJ1dUPfA\ncrKW95PIjjo+3VlX0vjXA59I4xYD1wEvH2PehwJbUvl/Cbyoa/zpwPvLzoGy/0ovQBv+gK8CvwIe\nN+R1h5EF+10pQB7MB0Wfae4EnpUe/ynw7fR4O7KdwMF9pjuVbYN+cW78L4A/zT3/fL+yAC8Drk6P\n/0MKDOXG/ytbg/5M4G+6pr+JtBPpGv5osh3OHsDJZF0D68jC9x3A6V3lHxb0h+aeXwCc3Gd5vpne\nf8+u4Z357J0bdiVwTJ/3OQn4YlcZDs89fxOwKj3+F+D43LhHpHVm3x7v+zxg44B14s9SPW0T1sC+\nab3aPzdsZ7JA3h5YBHyOrBU+yrq9HVnId3aGC+oeeBxwXlr2B4Crgd1z459O1up+IL3m4/l1Z4xt\nbHFap5/aNfxdwDnjvl/T/tx1UzBJryILiK8C780N71wpcq+ke3OT/CwidiXroz+drOWaf7+3psP7\nLZLuItuQ9kyjLwQOlLQ/WYt7S0RcOUZxN+Ue/7rH851TGRZJOk/Sekl3A5/MleFJwPpIW1lyW+7x\nvsBbUvfGXWkZlqTpFoiIX5OFyPPJdiDfAC4nC7rnk+u6GNHG3ONfdZanh+OBpwI/lHSVpJeM8j6S\nnirpy+mk4t3Au9laLx35uriVrcu9L3Bark7uAEQWYN3uBHbpVXBJLyPrRjkiFnZrdLwa+NeI+Gln\nQETcGxGrI+KBiNgEnAC8SFLPeXR5E3BtRHy3z/gPkx357QHsBHyBbKdG6rb6Shq2E1ld7UZuOxlV\nZN1VXyHbqeTtQtZwajUHfYEkPYGs++S/kh2iHt3pU4+tV4p0TpAtEBH3A/8D+O208ZKm/Quyvv7d\n0g5hC1kgEFmf5AVk3TuvZuHJull6N1nr67cj4rFpfp3+7g3A4q7+7yW5x7cB74qIXXN/j4mIz/SZ\n1zfIdnYHAVel5y8m6y74Zp9pprola0TcHBHHknVbvRf4nKSdRpj0TOCHwNJUL29ja7105OtiH+Bn\n6fFtwOu76uXREXF5j/msBaTc1StkAw4n61Z6aUT8oE8ZjwPOHbIcnfobJR9eCLw87dw2
knWxvS93\nbmQZ8PGIuCOt0x8CDpa0J1u7sM6IiPsj4hdk3UZHjjDfXrYHntw17OnA9yd8v8Zw0BfrDOBLEXFZ\nRGwgC+mzJe04ysQR8RvgfcDb06BdyA5xfw5sL+ntZC3/vE8AryHr3y8q6HcB7gW2pLD589y475B1\nDZyQTo4eRRbKHWcDb5D0HGV2kvSfBrQev0EWTjek+vg68DrgpxHx8z7TbCK7CmMikl4l6fER0elG\ng6xLbZhdgLuBeyX9FvDGHq/5c0m7SVpCdh7j/DT8I8ApStd9S3qcpP/cayapHr5KdlTTKfMLyE7A\n/km/ozhJv0d2hPDZruHPkfQ0SY9QdjL/dODrEbEljT9V0tf7LPNryMJ0WfpbTdbt1TlJfRVwXFqe\nHciOAH4WEbenI46fAm9M68quZOdsrs2V7RZJr+mzPK+UtE96vC9ZN82q3PhHkZ03ubRP2VvDQV+Q\n1Ao/lFwIRsRHyVpwb+83XQ/nAPsou276ErLD0x+RHfbfx8KuACLi22Sh9L2IuHWaZRjgHcCzyY4m\n/pns0Lsz/9+QnYA9niwkX0V2EvT+NH412RHOGWRdEGvJwqKfy8n66jut9xvIlrtfax7gNOAV6eqV\n08dbNAAOB65PXWqnkfXB/3qE6d4K/BfgHrId2vk9XnMhWZ/0NWR19zGAiPgi2dHDeanb5zrgiAHz\n+keyo7aOvyLrxrs41yX4L13TrAC+EBH3dA0/gGy9uifN937g2Nz4JWQnVLcREXdFxMbOH9mJ97s7\nOwmyOrkPuJmsgXIk8PLcW/wxWX3/nGxd+Heyy4o7VxftAfTrFjoQuFzSL1P5biJbtzpeSrbD+lmv\nidtEC7tSrQkkfQ34dNqxlE7SFcBHIuKfyi5LmSQFWbfO2hm937eBEyJ9aaookq4BXpi6VuZG0qHA\nm1M32iTTX0F2gvu62Zasfhz0DSPpd8kOVZf0aLnNqwzPJ2td3Q68kqxb4oDUfdVasw56s1FV4p4d\nNhuSziW71PHEskI+eRrZSeGdgJ8Ar2h7yJuVqbAWfboC4DSy62w/GhHvKWRGZmY2UCFBr+wugj8i\nu5Z7HdmZ92Mj4oaZz8zMzAYq6qqbg4G1EfGTdBXGeWRf8TYzszkrqo9+MQsv+1tHdm+Th0laCaxM\nT3+noHKYmTXZ7RHx+GEvKu1kbEScBZwFD1+NYGZm4xnpuzJFdd2sZ+FXvfdOw8zMbM6KCvqrgKWS\n9k/fbjuG7EcAzMxszgrpuomIBySdQPaV/e3IbhPa/cszZmY2B5X4Zqz76M3MJrImIpYPe5FvamZm\n1nAOejOzhnPQm5k1nIPezKzhHPRmZg3n2xRbI83rarKFP41rVk0OequVKlwOnDdJebxzsHlz0Fst\nVC3gp9G9LA5+K5qD3iqpScE+TL9l9Q7AZsVBb5XTppAfxC1/mxUHvVVCgT9pOdb8ZxWmRSyPg98m\n5aC30s0qFKcJvlmHZq/3m3X459/PoW+DOOitNLMIvjoFXL6sRYV+nerD5sdBb3PnSxIHL880OwG3\n8q0XB73N1bgh1sawmlXL361863DQ29yMGloOpq1m0dfvwDcHvc3FKOHkIBrNpC1+B357OeitUA74\nYnXqzoFvg/julVYaSaWHTUQ04gtak9RlE5bbRuMWvRViUIiUHe6wbfkiAkl9v5RU9eXpGLeF79Z9\nOzjorXEmvXdMr+k6O4BRpps0LIv4xmuvndawMjjsm8tBbzNVdst30Pwn7aoYt3UM0y/rLN5rkta9\nw76ZHPRWuKLDo4p9zeOE5ihHDNPe3sFh324+GWszM4/ArfPJ007ZJyn/tMs8TnjXtX6tP7fobSZ6\nhcMsWob9QqcOYZRvHU/zjeDOtNP25U9aFqu/qYJe0i3APcCDwAMRsVzS7sD5wH7ALcDREXHndMW0\nKisq5Nui+2TuqP3zRXaz+GqcZplF180fRMSyiFienp8MrIqIpcCq9Nwaqlcrc5qrT5rY2hz1BHGv\n5c8Hbr/bIUxytDDuffqt3orooz8KODc9Phd4WQHzsAqYVQj0C6umh8wsl6/Iumr659AG0wZ9AP9P\n0hpJK9OwRRGxIT3eCCyach5WQbPorhkU8E0Jl1nuDGF4V84486vCN5NtPqY9GXtoRKyX9ATgUkk/\nzI+MiJDUc81LO4aVvcZZtc0q5G0yw06qFtF378su622qFn1ErE//NwNfBA4GNknaCyD939xn2rMi\nYnmub99qyiE/vmm+vDWs337c93d/ffNNHPSSdpK0S+cx8CLgOuAiYEV62QrgwmkLadXhH6guz6T3\noR+Fu3GabZqum0XAF9PKsT3w6Yj4iqSrgAskHQ/cChw9fTGtCmbRonOrcDYGXYI5jxOz3inUi6qw\n4fXrx7dqmVVrvgrrXFP167+f9TdjHfSVsWaU7m/fAsFGMm1wONzno7vFPcm3YUf5bP151ouD3oaa\nVevQ4TAfvcJ+0tslWDM46M1sIt5x14eD3sYy6mF9nW9GZhl34TSHg94GmuaSPn/Jphocxuagt77G\n7Zuf9Cf8rHiTnhD39fXN4KC3mXBXTXXkr7SZ14/BWLU56G0k07TqHATzM8p9cKZ53378GVebg956\n8oZbX/7srJuD3rbh+9nUT6cvvciQ93pQX/7NWJuKW49m1ecWvVlDlL3TLXv+1p+D3gby4Xq1zaPL\nptf8+nHYV5OD3hYYZ0P1Rl0N03wOvuFcO7iP3h7mk7D1M0lID/shdn/uzeMWvZktMO396H2EUD0O\negO8cbaZb3PQfA56MwPG77LxzqE+HPTWk7/y3k7jtu77vdbrR7U46G1s3ojN6sVBb2NfbeND9vrq\ntNi7f1O2F+/Qm8NBbxNx2NefQ749HPQ2MYe9gfvp68BB33LeGNtjnN/7HffHS7zTrzZ/M9YW8Abb\nDr2+CeudfnMNbdFLOkfSZknX5YbtLulSSTen/7ul4ZJ0uqS1kq6V9OwiC2/l8T1S6qnX5+bPsvlG\n6br5OHB417CTgVURsRRYlZ4DHAEsTX8rgTNnU0ybh1lcP23VNO4dLh38zTI06CPim8AdXYOPAs5N\nj88FXpYb/onIfBfYVdJesyqsVYvDvh5mcRvjSaf3DqMaJj0ZuygiNqTHG4FF6fFi4Lbc69alYVZx\nDu3m8T1srGPqk7EREZLG3m1LWknWvWMlcWurueb5YyT5eYLXqyqatEW/qdMlk/5vTsPXA0tyr9s7\nDdtGRJwVEcsjYvmEZTCzHspsxTvkq2nSoL8IWJEerwAuzA0/Ll19cwiwJdfFYxXiDbKZym5V99rJ\neF0r39CuG0mfAQ4D9pS0Dvhr4D3ABZKOB24Fjk4vvxg4ElgL/Ap4bQFlNrMBHKzWTVVYKSbp47fp\nzPJnA6uwDtl8+uVH/XbtJNPZRNaM0v3tWyDY1LwRV4N3uNaPg95mwmFvVl0OegNm0xr0ddtm1eSg\nt5ly90G7+fOvJge9PcwbqVkzOehtgWnD3l0
3zeXPtr4c9LYNh7314iO++nLQW0/eqM2aw0Fv1oOv\nIBqfGwfV5Z8StEKUcffESTnQrencorfCVD1A3Wq3tnCL3gqVD9KqtPBHDfeqlLcqvFOsLwe9baOp\nG3RTl8tsGAe9LVBkGA5r3Y8z74hYcB7AIW7Wn4O+hapwG9lpfyCju7z59xnnVrr9XutuG2sSn4xt\nmaoF2LQ7mEmmH1QHEVG5OjKbllv0LVeFLo9Zhf2wgHaAT64K64lNzkHfIv1+VarT3113nT57B3q1\nNGHdqjt33bRUdyu4KeE47rXxTVlus0Ec9C3RHWhNb/mOGvZFtzab8KWsupffHPStMEqgN3FjHneZ\nZr3ja2KdWj056K3ROmFbduiWPX9rN5+MbbBxWqj9XtuUgOq3fPM8EV2nG711NOXzbzsHvQ00KJia\nFgJ1DGKzUbjrxibWhFAs4xvB85jnLL6b0LQdeZsNDXpJ50jaLOm63LBTJa2XdE36OzI37hRJayXd\nJOnFRRXcBmtCCM/KoMAq6+qjtoRoW5az6kZp0X8cOLzH8A9ExLL0dzGApAOBY4BnpGn+QdJ2syqs\njWaeweUdSn/D6qboVvMk7++WfDMNDfqI+CZwx4jvdxRwXkTcHxE/BdYCB09RPhtTGcHbpLCfV8h1\njiSaVHdWXdP00Z8g6drUtbNbGrYYuC33mnVpmBXMoTE7RYf9PD6nce/mOc7rrH4mDfozgScDy4AN\nwPvGfQNJKyWtlrR6wjJYUoWAr0IZBqlKiBVZT51ul0m7bKy5Jgr6iNgUEQ9GxEPA2WztnlkPLMm9\ndO80rNd7nBURyyNi+SRlsExVArZpQVG15RkU3qME+6Dxs1jWKvzGgfU3UdBL2iv39OVA54qci4Bj\nJO0oaX9gKXDldEW0fsoO+Ulbj2WYpK6qtFzD7qE/ibp8dja9oV+YkvQZ4DBgT0nrgL8GDpO0DAjg\nFuD1ABFxvaQLgBuAB4A3R8SDxRTdyuJwKM+kX+oqu1Fg5VIVVgBJ5ReiRub9mTUh2GdxY7dZHBVM\n+9l131663/N5c9dNadaM0v3tb8baQG3ZWEdZzip0deQDveyydFShsWiD+V43NeONajL5UKx7HeZv\nxNbrdwbyqrIzsHK5RV8j7rKZjV6t4bqHfz9FL1fT73raFG7R2zbaspHW/W6VZZV9lFs7WLW4RW/b\nqHP4TaOtyz0O11E9Oeitp2HXbTdlgy/6pGavPvNp5jftl6Mm4fvy1J+7bmws3thnY5TLIoedWO3X\n9TSLX83y59wsDvoamXef8rB5jVqWOvTZdup2lJAc93MY9J7T3pqg39U3o5Zr1urwWbeRu25qpoob\nUpWu6Z7GNKFZtnz9l/VZNGEdaCq36G1io3Ql1HHjr2OZYfRy13FHZtNx0NdQFVqeVShDEca5d3vd\nlr3oWyRbdTnobSzj9BtbNRS9Q/LnXX0O+hqbR6t63pfyVV1VW/FVLZdVg0/GNkBRYdu2EB/FJHVS\n9HX6VejCs2pzi74himjdN+HEahHGuU9OEV9eqgqvD/XhFn3DeONrriqFvNWLW/RmFVbFcHdjon7c\noreZG3aP9HHep+w+6Hnot4xNX26bH7foG2jW13gPasH1m8+swn7Q9E1oWeaXqw7B3oQ6byO36K22\nqh6MRfwGrdkkHPQ2MQfV5OpYd27N15eDvqGK2Cg74VSlfvOqlKPpHPL15qC3keRDvmqqWKZJzmuY\nFcVBbwPV5eZdVSvjqCepzebBQW99+XB9ck2quyYtS1v58sqWy2/EvX4JqU4t0Fn8hN4k8xxHXY6Q\nrFmGtuglLZF0maQbJF0v6cQ0fHdJl0q6Of3fLQ2XpNMlrZV0raRnF70QttU0XzKqc8hbMdyab4ZR\num4eAN4SEQcChwBvlnQgcDKwKiKWAqvSc4AjgKXpbyVw5sxLbT1N+ruhTfpWZtXLXfXyWTMNDfqI\n2BAR30uP7wFuBBYDRwHnppedC7wsPT4K+ERkvgvsKmmvmZfcZip/JOAwKobr1coy1slYSfsBBwFX\nAIsiYkMatRFYlB4vBm7LTbYuDbOCNeVHuqflQJ0Nr0vNMfLJWEk7A58HToqIu7tO4oWksbYuSSvJ\nunZsxpr6e65mNpmRWvSSdiAL+U9FxBfS4E2dLpn0f3Mavh5Ykpt87zRsgYg4KyKWR8TySQtvg7lF\nVg117A7zutMso1x1I+BjwI0R8f7cqIuAFenxCuDC3PDj0tU3hwBbcl08NmfeYM1Mw1oakg4FvgX8\nAHgoDX4bWT/9BcA+wK3A0RFxR9oxnAEcDvwKeG1ErB4yj3o1d2qkbi3JWZnXDq5f/XZ/P6Fu3ECo\njTWj9IoMDfp5cNAXqwqf8byVGfS95l2nz8AhXysjBb1vgdBwdQqYWarSclepLMM45JvJQd9wbb3k\nch7LPEqA1ynkrbkc9GZmDeegt8Zp4xHMLLjemst3r7TGcFBNznXXbG7RW2PU8YtJVeCQbz4HvTWC\nw8qsPwd9SzQ5CKu6bD66sKpw0LdIVQNxGvllqsLy1e2GclWoMyueg95qq1dIObhG57pqD1910yJ1\naWUO44AyG49b9C3hkDdrLwe91YovoZwN7zDbxUHfAk0MxiYuk1lRHPRWW1Vu3Vf5ZnJVLZcVx0Fv\ntVXlMO2oevmsHRz0VltVbc2bVY2DvuEchmbmoG+4pncdeEdmNpyD3qxg3ecSmr7ztepx0JvNST7w\nHfY2T74FgtVeRPQMzk63TlVDtbtc7oayojjoW0BSq0Ok346g6Hn2MmiHZFYUB73ZHOWPMhzwNi/u\no7dGqPK3ZHupU1mt/oYGvaQlki6TdIOk6yWdmIafKmm9pGvS35G5aU6RtFbSTZJeXOQCmA1T1T76\nMrgu2mmUrpsHgLdExPck7QKskXRpGveBiPj7/IslHQgcAzwDeBLwVUlPjYgHZ1lws25VCrG6/dKU\nNdvQFn1EbIiI76XH9wA3AosHTHIUcF5E3B8RPwXWAgfPorA2uSqFYJE6XTgO2G21ZR2wbY3VRy9p\nP+Ag4Io06ARJ10o6R9Juadhi4LbcZOsYvGOwOWn6hu5wN+tt5KCXtDPweeCkiLgbOBN4MrAM2AC8\nb5wZS1opabWk1eNMZ9NpetiXzTsbq6KRgl7SDmQh/6mI+AJARGyKiAcj4iHgbLZ2z6wHluQm3zsN\nWyAizoqI5RGxfJoFsPG1KezLXtay55/nnVB7jXLVjYCPATdGxPtzw/fKvezlwHXp8UXAMZJ2lLQ/\nsBS4cnZFNquXKoW9tdMoV908D3g18ANJ16RhbwOOlbQMCOAW4PUAEXG9pAuAG8iu2Hmzr7ipnnz4\nNLWl54A1y6gKG7mk8gvRclVYD2at6KAf9TYHVapb7/waZ80o3d++BYIBzWvhlxloTag/axbfAsG2\nUYffYjWz0blFb33VtZXvnZTZQm7R20jqFJ512imZzYNb9DayQScZq3wCsih1vNVwGffmt/I56G1i\ng2
7c1YbgH7RMbVh+qw933djURmkhzrMVOa95uWVsdeEWvc2NW7lm5XCL3sys4dyiN5uDeRzN1PHk\nsM2HW/TWKPPuN59kfvkfCM//zYt3Bu3joLfS9Aq4Yc+HvV+d+RvJVpSqdN3cDvwy/bfMnrSkPsYI\n+0rWybjhPOz1Y7zfNvXhHUU115EC7TvKiypx90oASav9IyRbuT625TpZyPWxLddJb+66MTNrOAe9\nmVnDVSnozyq7ABXj+tiW62Qh18e2XCc9VKaP3szMilGlFr2ZmRWg9KCXdLikmyStlXRy2eWZF0nn\nSNos6brcsN0lXSrp5vR/tzRckk5PdXStpGeXV/JiSFoi6TJJN0i6XtKJaXib6+RRkq6U9P1UJ+9I\nw/eXdEVa9vMlPTIN3zE9X5vG71dm+YsiaTtJV0v6cnre6voYRalBL2k74MPAEcCBwLGSDiyzTHP0\nceDwrmEnA6siYimwKj2HrH6Wpr+VwJlzKuM8PQC8JSIOBA4B3pzWhTbXyf3ACyLiWcAy4HBJhwDv\nBT4QEU8B7gSOT68/HrgzDf9Ael0TnQjcmHve9voYLiJK+wOeC1ySe34KcEqZZZrz8u8HXJd7fhOw\nV3q8F3BTevyPwLG9XtfUP+BC4A9dJw8v32OA7wHPIftC0PZp+MPbEHAJ8Nz0ePv0OpVd9hnXw95k\nO/wXAF8G1Ob6GPWv7K6bxcBtuefr0rC2WhQRG9LjjcCi9LhV9ZQOsQ8CrqDldZK6Ka4BNgOXAj8G\n7oqIB9JL8sv9cJ2k8VuAPeZb4sJ9EPgL4KH0fA/aXR8jKTvorY/ImiGtuyRK0s7A54GTIuLu/Lg2\n1klEPBgRy8hasgcDv1VykUoj6SXA5ohYU3ZZ6qbsoF8PLMk93zsNa6tNkvYCSP83p+GtqCdJO5CF\n/Kci4gtpcKvrpCMi7gIuI+ua2FVS5z5V+eV+uE7S+McBv5hzUYv0POCPJN0CnEfWfXMa7a2PkZUd\n9FcBS9NZ80cCxwAXlVymMl0ErEiPV5D1U3eGH5euNDkE2JLrzmgEZXfj+hhwY0S8PzeqzXXyeEm7\npsePJjtncSNZ4L8ivay7Tjp19Qrga+koqBEi4pSI2Dsi9iPLiq9FxCtpaX2MpeyTBMCRwI/I+h7/\nsuzyzHG5PwNsAP6drF/xeLL+w1XAzcBXgd3Ta0V2ddKPgR8Ay8sufwH1cShZt8y1wDXp78iW18kz\ngatTnVwHvD0NPwC4ElgLfBbYMQ1/VHq+No0/oOxlKLBuDgO+7PoY7c/fjDUza7iyu27MzKxgDnoz\ns4Zz0JuZNZyD3sys4Rz0ZmYN56A3M2s4B72ZWcM56M3MGu7/A73kGdm7fpW0AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
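As a minimal sketch of what the decode step above hands back, assuming the `xray_img` tensor from that cell and the stock `tf.image.convert_image_dtype` helper (for a PNG, `tf.io.decode_image` returns a `uint8` tensor by default):

```python
import tensorflow as tf

# The decoded PNG is a uint8 tensor of shape (height, width, channels)
# with pixel values in [0, 255].
print(xray_img.dtype, xray_img.shape)

# convert_image_dtype rescales while casting, so uint8 255 maps to float32 1.0,
# a convenient range for many image ops.
xray_float = tf.image.convert_image_dtype(xray_img, tf.float32)
print(xray_float.dtype, float(tf.reduce_max(xray_float)))
```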
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "UwqfpOm--vV2", - "colab_type": "text" - }, - "source": [ - "# Play with tfa.image" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "jIa5HnomPds3", - "colab_type": "text" - }, - "source": [ - "## Mean filtering\n", - "Mean filtering is a filtering technique, which is often used to remove noise from an image or signal. The idea is to run through the image pixel by pixel and replacing it with the average values of neighboring pixels." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "SutWnbRoHl6i", - "colab_type": "code", - "outputId": "4755c425-b3f6-4309-e53e-e46e72acdaac", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 269 - } - }, - "source": [ - "mean = tfa.image.mean_filter2d(google_img, filter_shape=5)\n", - "_ = plt.imshow(mean)" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvVmMXFl65/f7zr2xZSTJTCaTyb1Y\nZLFYS1d1rd3V3epRr4PWjAGNAUkzI8AjDwT0yxiwAT9Y8IthwA/yiweYlwEEa2BpYIws2J6RZkbu\nRb2qu7q6a+uq4lIssrgmdzLJ3GK993x+OOcukSSrWKzIjXl+RDIzbtyIOBFx7/9+59uOqCqBQCAQ\n+GjMag8gEAgE1gNBLAOBQOA+CGIZCAQC90EQy0AgELgPglgGAoHAfRDEMhAIBO6DZRFLEfmWiJwQ\nkVMi8kfL8RqBQCCwksiw8yxFJAI+AL4JTAOvA/9UVY8N9YUCgUBgBVkOy/JzwClVPa2qPeAvgN9e\nhtcJBAKBFSNehufcDVwo3Z4GPv9RD9i2bZvu379/GYYSCAQCH82bb755Q1UnP26/5RDL+0JEvg18\nG2Dfvn288cYbqzWUQCCwgRGRc/ez33JMwy8Ce0u39/htA6jqn6jqS6r60uTkx4p6IBAIrCrLIZav\nA4dE5FERqQL/BPjrZXidQCAQWDGGPg1X1URE/hvgu0AE/BtVPTrs1wkEAoGVZFl8lqr6N8DfLMdz\nBwKBwGqwagGeQOATcbd0YFnxUQQ2MKHcMbDuUe6upYHAMAmWZWBdEMQwsNoEsQysD8KUO7DKBLEM\nrBP0TutSBzV06f0iQWEDwyOIZWD94VUxSGFgJQliGVg3CE4nc5G8w5Rc0eEENhhBLAN3paxD99Ig\nXXIj30+W6pjeZb/Ss2b7q5a2aulhWrImFVTB+t9Zi0ER95QiIOKn4LLkDUixbzYqKY9Gi/20uK1L\nHi/3fHdyh2sgCPjDQxDLwB1o6bfg9Ciz6sonfzllJ5Mm9cKm3scoIoUQ5b1TTfFIVbdZFTRF+z2S\nVov+4iL9uXnodpBuF+l2Ielh+z00tWBt/jtXKCOIMRBFmChGogiJY2ylhtbqSLNJVK9THWkSNZtI\nvQ5RBUSWvLey/erGrZiSSCoqJTEXQf27l/KVIgjlQ0UQy8BdWSqYg3dmMjlofxa3bK6sNtMxVSds\naUJ/dob+7C36s7dIbt/Czs1ibt5AFhexrRa21aK/2CJdnIdeF+l1kF4P6ffRfoJa6yxLq6AWUQUp\nBFMlgjhGohjiGFutYGtVqDeIag3iRpOouYloZBS2jGFHR5FNo1S2bCHeMkZt5y6i0U1QqYIxqBgQ\n6z8L9z7LPbMzI1QANCXFOusWgwmpzA8NQSwDd0VKYpBblQPoHSKqOCvP6ZaAWuh3sQvz9GZm6N68\nSTpzg/TKNDp7Gzs3i529TTo3h5m5ibRaaL+PUcUAsZjcZFU/EvePXKEEvDB5i9YqlhS1FpU+KoJZ\ndHauWrdPokIfwUQxjDbR0RFMcxTdNEayZZz2jl2wdRwzvpXK2Faq2yapbp+ESgUw+dQ9v4yoIv59\n3zkPDzwsBLEM3MFdxdHPx7UsUvnkO9umYBNI+iTz8/RuXKd79jT22lW4cZ3+rZvorZukl6+gC4tI\nPyVSqIqgAkKKQb0WuedWQK0XSMm2lDybuV65abLmWqX5bae2gmjhElAsYvvY+VvY+RmwkFhBTQU7\nNgbjmzHj4/TGt9KemCTav5/K5BS17TuojG1F6g3UxCBgsiuLCGhmSwbFfNgIYhm4K4P5iyX/XOZq\nzM27LOCS0r9+g97VS/SvXSK5egmuXYPT50ivXyWdm8H2OhigKjGi4kRF3POnXgRVTD6CPLDi/YNl\ni7IcpJH8eQp/YRYIkmziXAoEaS72zhqM/OMlAqSPzl3Hzl7FnrH0TIQ0N1HZt5902xS97TuR7TuI\npqZoHjxEZcs4WquixmQey8ItEKbgDxVBLAP35M4k8CyMAaqKpIq2F+nNXKdzaZre++8jly6h1y+R\nXLuM3pql0ukRixCLYKVaCqSoExUUwYK4IIr7cYKGlKb1AqgBKQtQKXhUbCr9XYpqi6KZoJZ+jL9f\nAWuyxHfFYIgxiFWYnyM58i59E6GjmzHbJpGpKfqPPU68ey/VPfuo7NhNtGkzxJF3R2QOg2BhPiwE\nsdzIZAajZB7Bkux4cy3PoFF1wmEt6fwci+8fITl3FnvhHOmlC/SuXYQb16klSkWdOCbGksYWi2DU\nCUdmeVmBvg/KiLr7jbcMVTKb1eRxm8yGvNN3uuQN5RQR7nz6reU0oPL+ziY0tiyk5EJrsWgEEYpZ\nmIOFOfTsh7R/9RraHCWenCTevQezZx/VZ5+ntnc/1a3boOqi/uo/y9ynCfm2zGJe+j0EkV17BLHc\nqJSjuYq35bR0lwzu0O/Rv3md3vlz9M6covPer7EXp9Hr14k
6LeqZzxLBCi61hmwi6p9Ri4mp8zCW\nxU+ctQrgLTMn5kuCKUu4e/DJy42W7E4/Xy8LlpT2z6bQ7nHF52ABlaWjFcBQs4rMz5PO3SY5dwbd\nMkb//Hl6jxyk8uhB6vsfpTo1hYw0QUzp01VUM0ksJSSV80xzv21grRDEcoOi2Umb3y5yKq044TSA\npCnd6Wl6Z0+TnD5FcvY0/XNnsJemqaQpUensFheBQUuJ3qJmiV8xu8c/Krf6vLBJWT7LI7sfPmo/\noUh6LyeXl0JGUr5Y+P3Ev7PcF+ovLqVxxiJImmJv3qTz2qukJ96nt3cf3f2PEj96kNEXXqYysQ0q\nVUSyy5IiGJ/DWfhny+IfpHJtEcRyA5MJpjtDS6JhBWzf5UKeP0vnl6+Snj5FcuECeusWUb9PtXiS\ngoFqmOwvuedJP7BdyhJx78fc/XnuY2+50/68M/lJBn4Vfw7Kl2Rz+nw/f0EQaNgEvXGV/sw1eqdP\n0N+5C3vtMtVDh6kdeIzqtu1QqZUi+8XYBGFQzgNriSCWG5XMREJBs0iuQtLH3p6ld/4s7Q+Ok3x4\nguTXb2BmZ6iohTwIE07oMqqDFnE9tejtW3RnZ2jdvk7v1DG6B5+kfvhpagcOUZnagRopgl1kVmUW\nzQqf7lojiOUGxrnvxPno+gn9mevM//Kn9I4fwZ76AL1yFdPtUUWAyE0bsRhs6fQOLEWBVAQVIRJD\ndH0Ge+0W3bfeoTsxTnzgAI2/9w3qh5+kOrUDKnXcRcii4qfn4bNdcwSx3NAI2AR7e4bumdO0jx2h\n8+rfkV6+gGkvEiMYccnclsidyGoQsWGyuIRy70zF1Y6rd29EqSFCiJI+6dXL9G5dJ2kt0Dv3IY2n\nn6V24HHirZOoiV0GAISPdg0SxHKjoqCtRTrT5+gefYfkvV+TfnACuXLFWZISkec4qkVEsODjJJ/M\np7jRcIEtyaudVBKfmuRFtZfQP/Ie3UtXSM+ep/vMs9SfeZbGwSeQuJplx6/umwjcQRDL9Y6SJ/0M\nBlYyyybb4tNxEFCLXZxn4c1f0H731yRHjiIXzxO15xFjnPVoAdE8Wm183Xfx/BBO6HsheX28isWK\n9fEgQdQQqSGyir1yDXt7ns6li6TTF5C/36f6yKNEo2NAfGcKV/HsA7d1yW7hW1kegliuc3yIBnLB\nzMMERWccX37nAjgp/atXaJ04wvz3/jP21Cmi23MueCOx90aW8/18qaEKkWapPSG883FkAqb+UpPF\n+F0RpM+ONyD9Nnr2DHbmNnPap/rC84wc/gy1yT1IXPE5nkuzScNnvxoEsXwIKMmjtx8NolkjCYso\n2H5C9/IlOu++TffNX9I7eYzo+nVq5XNQIc7Sxss56QOZ0uFEvS/yLCQpkuOzj84lILjySrEuv2D+\nJsl3/5b262/RPXyYxkufp/7MC9T27UeNGZg1fHQVU2C5CGL5kJAJZj71FhflFlW036N19Aitt9+g\n//abcP4scWvBNZAI4rfiZKIZWffbipKKpZom2GvX6S8ssHDzJv1rNzHf+odUdkyhUeyj5IOJ+/mk\nPBiey04Qy3VPeSKOL6OTfB6Ytjt0z5xk7vv/geToUaLrN4iSlCibaouUGvKEs2wliRSMQiqQipKa\n1Lk7Ogvoqffpzc6x0Kgz8tJLVB/ZD7URQEo5nVmeZmjZsRIEsXwIKKRSML6vI5rSn5mhfeI47V/9\nDP3Va9Tm5jFE2KzSRrPSPT9NVA2CuUIorlAqC+IYdbdT44JBlbRPenma9g++Q3L7GiOvfIn6oSeJ\nmptcyWT2PZVrSQPLShDLh4C8WE59DXPaoXv1Mq133qb1i1fhxHEatxcRDKm45rQGc0cJYBDKlcJ9\n7qmAGt9xyVuZLgjk7o9RkvNn6LbnSW/fIp2bY+TJZ4i3bSdbmM21rAvf20oQxHKNk8tZ1gnHp/OU\nvJR+P1dXQ3eR1qs/YuHnP8UeO0p04wYxkGSddrEYX94YzrHVIeu+5NwmBhWKxC7/hWffbMVaoqtX\n6d74MbPvn6D1/EuM/5e/Q33XbjSuUSxxUU4fC1/schDEcj2Qn0CWrB9Q1udR8lUGlWRxns7J4yx8\n7/+DD94nnp8j9nXcqTckjYKKdV4uDYK5mmTfofpMotz7LKXZtbrshopV7OXL9PuvMj+5DV5+hdoj\nB6E+UupBB+ELXT6CWK518t6KWdOLwuoweEtEU9L523SPHaH1058gx44St1sgxq+uKMQ2e8Kij06Y\nda8Scvf0n+ybsf63qFtmQ32HohhFb1yj85Mfoq02zS+l1A8cwjSa+XO42vLwxS4HQSzXAX7RA3wv\ncWdfiu9AbpV05iadI2+x+OO/xR45Qq3VA4n9kqyl1RYDa5ZSCqZf6kLyGvPsTsVF0Dl3nsVugq3X\nYXSU5r4DA2sXhW96eQhiudbxc7Fi1cLBFQp7N6/Refst2j/8Lrz/DtHCLNBAs2UauLNrY2DtIqUu\nwC6f0paW1vBpQzZFuj201wVNccdCkeEQvvDlIYjlWkfBFchp3hhNvb9ROou033yN1o9+hBw/QrQw\nj0aGxFgiW16wILDeyPyXRr01qaDiluyw27ZRf/4FRp97kdrkjoFUsOBaWT6CWK5xxHv7DWDFoliM\ntXSvXqX11ht0/t2fE129RGQVpYaqYk0KBpdzma/1ElgvqI+9IbhORSpYiehEEcnkdkZ/53cZ/dwr\nVHbuAxOj+J6kQSmXlSCWa4klJWt5srn3O7qNKf1rV2i/8TqLP/4B8dXLGJtiJXKBAMUHc3z3oCxS\nGs6jNcPHVSZmEXLIrMmIronpTU4SP/8SzZe/RGXnDlSM92+Su2vCF718fOwq8CLyb0TkmogcKW3b\nKiLfF5GT/ve43y4i8q9E5JSIvCsiLyzn4B8mVIupl8u/K3Ios63GQnL9Bq233qD9o+9hThwlUouK\nyVdUFFGf5CxLc84Da4bsux380vMcS1/gb9SJX1eE3rZtxM+/yNhXvkFt5x4wlbzEMYR0VoaPFUvg\n/wC+tWTbHwE/UNVDwA/8bYDfAg75n28D/3o4w3zY0bv/5IIpoEo6c532O2+x8OMfwqkTVNsLfldf\nAZLV8mRJe6W68YeZpZ/cuqC8XKSUpt64i152oewDycQk8XMvsuUrX2fkiacQ31RDxLiGKeKawEmQ\nzWXlY8VSVX8KzCzZ/NvAn/m//wz4R6Xtf66O14AxEdk5rME+rBSZlDb/Ed9sVwGrgtqE9pE3af34\ne/DBMUxrEZEIxCDiIt9G3emiIpTMjodiZnaPy8ldxXFdCKcWFVhWih8VC7gIt0Ww26aovPAiW772\ndZqHn8Q0R5wvUwzGf/eu8lFcuWrwWy4bD+qznFLVy/7vK8CU/3s3cKG037TfdpkliMi3cdYn+/bt\ne8BhPGy4uZgqpAhiXa5d2l2kfexdFv/i32LOfEAlTRGtohqBSb3t+bCfJFr6/+4MfAI+8VsZNOLW\nAqJO4Ky4oF2W3i
W+okpw3386uYOR3/19mi+/QmXHLjRyF8fgmlwd7mca/pGo6gNdxFX1T1T1JVV9\naXJy8tMOY32T13q7pgiuzttVD0uvTe/D47R+9mPk3DRx6uvAjYJJfVOMjcTd7cs7tMO7IbJ/a83O\nLI9IVImsElkQNfSISCaniF54mebnvkhl1x40qiBEZMdIYOV50PPsaja99r+v+e0Xgb2l/fb4bYGP\nQEq/Vd3JICqQduldOE37tZ9h3/wVcS9BqWBNTGoUa3zC8trSgaGhqvlPJn6u/ZzNO8HnP6qua0/+\neehA0GRN4r94o4KxghLRlYhk2xTm+c+x6avfoLpzN5gYEeP8kkrIn10lHlQs/xr4A//3HwB/Vdr+\nz3xU/BVgtjRdD9yDso0kpa29q5dZePNXdH/5GubKJR/nKUe6nUX6sNboZH643C8H3pKWvEOPa20m\neZ18ZneJLw4tosVr6zPKxp+XokpE10T0t08Rv/Aym7/6DRqHn3JLSigY9T5tsUEqV4mP9VmKyL8D\nvgJsE5Fp4H8C/hj4SxH5Q+Ac8Ht+978B/gFwCmgB/3wZxvzwIWBVEbUYVRflTBLa771D57VfIBen\nfdK5m5yLVd8A1mD98hFrhSyK69L+fGFmngNY7JX9XyTNZz5GLaRNXZgrQUkAayKkUsXEMaYSY6IY\niXxUGNDUYm2CTRLSbgdN+mCVWC0xSmSK+unB35qPw71skbKVlxmWtix99J2fweD7LT938V6dNSw+\ncSERoTe5neiFl9j0m19j5PBTRCMjftlhHwzyQhnJxnK8rBU+VixV9Z/e466v32VfBf7Fpx3URsTN\nHC2qFnptFk8cZ/Gv/j3R6VNUbFr4MP3+WWWPO4nWmt3kgheILSL0/oTP7OdCUNxfPXBle7UasmkT\nbN5CdccuzKYt1JqjVEdG0WYTGWliqhWkUsFUYiSKMSYCC9ZabNLHJj1sexHtdpF2C7O4iC4u0L91\ni/7cLHb2NszdJlpYIE76GBEfWNNsol/yBbvxGRX/nnQgaHSvT94Z/oUIl0q+XdDOv3ejSoLxlTn/\nmNGXX6G+aw9Esd8/k0pBiEKwexUJFTxrAs2XRxWrdC+ep/X6zzEXzhOnFuvPTINQNlLW9nmTuQgU\nwboTXvx7VAEjpAqpCLbRgL27qGzbTmViEsa2omNOLOPNW4hHNxM3RzH1GlRiMMb3f/R5htlz+tdT\nFNUUSVPodrGLiyQLLbo3bxDP3sLenkFu34SbN+ifP0d/5gbMzUHSw4gSUcpV9Qn/SDHtL2zHe5eS\nOv+zkzlrSsEcgCwmKkKCIZ3YTvzCS2z63Oep7tgJkfdbl5QxJJ+vPkEs1wjZVLo/c4v2O2+Tvvka\n1U7Li8L66VHo7agBn6rDYtS1AukbIR1pYiYmiXfuwuzYhTy6n3hyitrkDqrjE0TNRr5utvo0oKKR\nSGafLp06W7+vtwvjKlQbRJvGiID6gYNgU7TbJp2bpXfjOu1z50ivX0OvXCS5coHk6jT21i1AiPz6\nl4pCluLjY03WUCzjcbfPwKcHuZqBkp9Rs3wHSDDYbVNEz7/Epq9+k+rOPX7Z2/XxXW80gliuAbIT\nLm236XzwPt3Xf4U5fyG3y9bTyVPYXFkE2/lV3erYgh1pEj3xBLU9e4h27yHeuZf6zn1OKCoVNJLi\nGbIpbP68ZuA1oCTHvuqlkGcZ2AcBIiCKoNIkajZp7NxF4/BTJHOz9C5dpHf5AunF8/SPvUdv+gLR\n7TniJMEAqbjeoDYfx/2l8OQXO9UiMCdCH8FObCd64WVGv/b3aTzxFGri+3rOwOoQxHKtkFh658/Q\neeOX6AcfYDp939B1vcW6JV9XRnyXpH5UId00RnXPPmqPHCR+9rPU9+2mOjWFNEcRUwUit0RGOQNR\nBn55b+KAPVkIIYJolO+cB2SWmn4i5E5eFaga4sntxBPbGDn8JMmtmyzuO0B8/jT27BmSs+fQGzeI\n+20MNh8DNquWuXdwrZzhEKnrIASGRJyPMn7+JUa/9g0aTzyFjIySdSB1M/T19a1vBIJYrhHaJ48x\n/53/SPKznxLN3hrwTWbdhNYHzheXIth6k/iJJ6gfepz40BPUDjxGdftOpNZw0+VSeZ7gKpfcbR/t\nFc3dE/eUDp9OpXfspKW/Sta5Ln0mL3/GIPWYeGeDzVM7kaRP//YM3XNn6X5wgv7PXyW5eB5ZnCXC\nuuXhtBj/XYaV/xaf7qVEdKOI3uQko7/zj9n0uS9Q3bkLNZU8jGOxXijDdHytEcRypcgrSiiULztx\n+x0W33uL3tF3MXOziBTJ1sXEcgVPnDuEWcmC75n3VLLa5mI5QsBVNae1Knb7dqqPPU7ji1+h/tjj\nVLZPuUi3DNiOpTBQOapf+CazvXKbssj2IR/UkkEPhl3KkjMYmc5epPh0/XMYA5U6lcmdVMe30Xjk\nUVpj43TfP0rvxHHSyxeJOotEaimqrnyQLn9HBtQFt5whaugaQ39yO+b5Fxh9+QvUdroSRsEFqCxp\nvoxEkMm1RxDLFaJs51isP0l99PvqZTrvvY1euUxFXLDUlPOB7rCGlp8s79EKmGwJrWxabF2itDWZ\n1esWcu2bGLt1K/HBx6g/9TS1p59h5KlnkWoDydcpLyL77nXyF2Tpn4PRfxn4PfiJFAGwpcKY3Z//\nf1chyjMfi2m2eGs0rhJP7mD0N79CfPBRov376b3zNsnJE3D9CpH3SWYt9dw02txh6fZE6E9OEj3/\nAlu++g1qu/ZANGg9SpZ5v8EKWNcLQSxXkOyUzXLIxSrp4jyt40exJ09R7fb8qWMGjKbVsjJU3BK6\n5CkzgE8FUlGXEI/zASYi2McOUXnmORrPPEf9sUNE27ahlZoTSv/YXISym3e1AO/kDjGUO++7q/tw\niQiXPcCyZEdBUfHPptltL8XNcRqHNlGd3EFnzx5ab++i/5MfoTdvEtnULzHsXAs+lOXWcReXHpVO\nTFJ5/kU2feXrNH2btXwKn/sozT1HF1h9gliuECrFomP5mt22T3f6DL133yaemSUCVweNDwzIKnmt\nxI0j67GoPopr/LTbmtRbUTFqDWljFPbuoflb/5DG089R27kLW6tijeuSY7Qcm4Zc3mTJ7bsP5RON\n++PuuGMXGbxRFuO8CYcqSgQmwmzdRvO5F4m372BeIvq/fov0wjniVgsRIcWApBh1ZQQpBjuxg/j5\nFxn92jdpHH4SMzIy+Gpyx6sH1iBBLFeIvLlDdkpYpXPhLHM/+VuS1/6OkTRx3c5XdZSObAlWAWoJ\nRNb4dmKu26ZzJRi03kQeOUjl5ZdpvPwyI4c/g5iK87b6ZQYjLSa5he9xfaCl8WZWJhpBdRO1Rw4T\n//4Oul/4Iq3Xf0Hy+mtw/ixxr4sVQyrG96PcTuP3fp/m575AZccuMKFz0HoliOWKkfmzAFXS1hzt\nE8fpnziOzM74GhezdlJGSkZPapREnGREGoGNSMYnkMNPMfLKF6k//zyVnbs
gquSPMX7qXZ5YroUL\nwSdhMLE+q7rx9wHRyGYaj3+GaHQT7S1jtF/9O9Ij72BS6CHYySkqz79E83Nf8AnnkW+eIaFscR0S\nxHLF8JEbC2hK7+I5kuNH0PPnqVCk7a2Nc6ioTLGiJMZiEWI1aGpIR0aJX/4CjS9+mcaTTxNNTOBW\nOLCgmUhmEpNnPOa+QP8S6wD3OTj/bGYXZxU9Pn5eqRHvPUi90URGx2jdXqR/cRrGxqg8/xKbvvZN\nqjt3o8aXMAIugT5MutcbQSyHStl2WnIqZFNsgXR+jtaJY/RPHCWenUcw+bTXPUsxWR1cnXG4p1ce\nvV0y3lzUpCgjrKgBKyRbtmCfOMyWb/0WjYOPY5pNH7Rxdd/lPj2ai8tgeGb9iISWBpsFYErfscG9\n5zimMrWL+MUq/avXSU+eIJqYYNNvfJnGE0/mQmk0S5GyCFGQy3VGEMuhs3SyWY7UuhOlc+ksyQfH\n4eJFquA70Lh9MsPLCohGGFKyhG0d4twt6x6u+DSlXDoLC9DVNUOkilqwYxPwmWcY/epXaDz5FKba\nKL1FKX5yf0NJWtb5+uWuSjx7j0ApL9SIa6snWycZ/c2vEj9xGNMcZWT/AUyzWcqVhdxkX68fxAYm\niOVQKadbF6EB/P+qAnYWZv8zMT9Ex26QplXoV1CNsGIR8d5LP521pby7YZ5fUh6vGLIUePUJ8a5T\nhBPPXq0B+w/R+NJXaX7+i9T27INq/a6h5eJ5l4rjYKR5XSDFe8j1v7jTfafip+QKEsHI/oOM7D9Q\n2OxS+D5dOWfRfzOwvghiOXSyYMBSRRAgwXYvEVfeRPafJhLon2xgL48g7QZZsoqoIbKCmoTEGFQj\nn34z3BCJ+HI9i89k8sMUcOuPA6mCffQxml/5Os2Xv0R1525sFPlcwnud9HdRw/UikCU+Os0o23Q3\nH6wsvXn37YF1RRDLoTIolOqn3lmJn+gi6eL72M5xKlvniOoV4hGl14DkA4P2I8A1orD4Po0WX/64\nHGMln2pbvzXLiUSUVEHHJ2j+5tdovvIbVHfsQ0XWWDAqEFgZglgOnbKEeL+gAli0dxUWjmG614EU\nM6pUHrVEdVjsGJLLdbRVJSLKp3eRTzVShq9Orveji/SiSqSuOw5i6KMkm7dgnn6WkZe/RHXnXpDI\nW53kAaBAYKMQxHKoLFWPkg9TO6SLZ5D5E0Ta9dZcAvWUaLdSf1bojCj98wadNYjN6ovLqdHD9nVp\nvj6OIVsd0bUQ6zWbyBNP0vzNr1Hb/Qhq4tx3Fw0EOgKBjUEQy2FSbsBT7qmmlrR9lvTWz7C33wCT\nIAJR6oIottKlfjiluqtDb7pN+1SNdLqBzDV9dx+/5O3dXuxTiVZ5OVn3XB1j6G6bIH7p84x987cY\nOfQURHG+32CSdhDMwMYhiOVQySzJkohYQDvYxQ/RxWOQXMu1RpBi9T5JMZu71A70kWaVzmif7tEY\nWagQq+tmkedfKj6yik8Cv0NJ72uoBvFBHcWKszJ7zRHM4SfZ/KW/x8jBw0i9UWQDZY1zA4ENSBDL\nZWCgakUV27tJungC2zpJRLewQH3LsmwJBiWBuqG6C0zNYNvz9M+PkM7WiWyEisvILCbj5lMVxLjJ\nd9FaLFVg/6OMfv5LjBx+GtMcBZ/mkgWpgjEZ2KgEsRwiRb0K+foxSJe0ex5tfQDdKz7lZkmCMkVH\ncKsGYqU62WP0mQUWRyA5Y0hx6Z6AAAAgAElEQVRuxpik6NajmvVCtA9UF5MtXphmlSlW0UaTkRdf\nofnMc8Rj4yCDtepBJwMbmSCWQ6Xsz3Mo86StU9A6jbGtgfvKuHpjJ1+KYqOEyi7LSF3oNS3dD6vo\n+VFMGucSWXTnzl77k+Gm3oJRg4oh2nuA5kufd91xxARxDARKBLEcIm5d7MF667R1HubeROaPE1s7\nWDGXPQ5ASmEWSd32GCrbZ6lsXaD+SJ3WG0p6bhRuV4nUiV0qLrn8k8bJBTA2RYySRBXMY08w8q3/\ngvrhJ1xjWiRkBwUCJYJYDpFSYWNWPIhtXYD2OSRd+PhHZ2VzWXmcukayxAnxZJfGs7N0R/qkp0fQ\nmzE2ibHERLlD8ROghZ/STkxS/exz1J59BsmXOpCQSxkIlAhiOUx00G8p9ND2h9A+j0jfBVLuKj5e\nZhUQ6yPOeVGxe0aTEO9uIfWEfrNP58Ma9loDacUP1ihSXIVRUm8SHXqK+nMvUNm56yNKGAOBjU0Q\nyyGS2ZTge/ikLWifQXtXQGy+ZMTSCmGXF+7EUbVoi5alF7mSSUVNSrSti9RTGLV0Tgn2gwp0K6VM\nSbLsIopHZ12+XSs1UVcdlIiBHXuoPv1Z6o8eQuI6g36CkEsZCGQEsRwyoq5zj2LR7k20fQHsIh+f\nC1kEapyYab613EBDJcVsTqk/KsT1iIVWjL2Soq0KRrOvM6/0zkXU+teOfEMOBdLmZiqHD9N46ini\nsa1+f/eqoeVDIDBIEMsh4iLaKS4VqEd/4Th28QMMCeDsvHtKUJ4vmfkLi2d1v8rpRgnSSKjtTzDN\nhM6FKt1TVdIrI9Cu5q8h6joHKeIrgFxvzNQYUitUn/ksjS9/mfrjB7FR5B9lMSo+ZSgIZiCQEcRy\nyGRrSEvSQrvn0WQGQ7ZMQ7EcwYPhBMw1wLBgOsSThkYjJWomtEdS+tMjyFwTo4LR1MudwfUzcknt\nloikWqP2+ONUHz2IxrV8XFlwSf0MPMhlIOAIYjlUBN8FkrR3E9udRrTtLcrS2tkPhC+lVIMSIQIW\nRUwHszmlVqkQNVJaI9A9VkUXTSatgMGC71/p05smtlF5/AnirVNA7EovsUWpTlhRKxAYIIjl0PEh\nlc5lpHMJIS1th08tlpnw+qUgrCiifUzdUtvpyyQ786QXKqSzMcZWgBQVg9UI1RSNYmqPPErt0ceQ\nuObEXDO/prNcs0BRIBBwBLEcIoVHsgPdS0j3irfsXKWMPKhO5qRF5Nx7I32nDvdT6VGZVJrPpnSa\nVXpn6tibFpNU855BFoMdG8ccOEhlYhtIFlDyjT3uEPWgmIEABLEcLgqQoL2raPskLE4D1qcCOfIA\nzn2TCZj6HkEJRW/LLPndNdRIJYGoR31fj+pkje6jKd0PU5LTwPUIFUMaV6h85hlqzz+PxBWXOG/U\nB4AGhVLWUFK6c/ne2dVp4FZwHSwPWrrK5x2zyo1VNkbuRBDLYeKXf0h7M9C7hiZz7jCSLOvxUz35\nkmcoT8uzTkfqrccUqfeo7RaiutKrQ/vXEel8BTs2QbR3D7XduykO8jx+PlCFtDYpLF7r37OrdMJ3\nlA8Mm8FLU/YJZzObjSCTjo8t1xCRvSLyIxE5JiJHReS/9du3isj3ReSk/z3ut4uI/CsROSUi74rI\nC8v9JtYOzvrT3gzavw52oSSUmdh98oOreNTS5yhbgl44/aqQSgqVNvHUArWn5qgenkcn28i+bcje\nvcRjE7iv34WfJFtUx/+WNbjITn
Y5yDIO0NT5YNUn86/2AB96tGRlKoPi+fBzP5ZlAvz3qvqWiGwC\n3hSR7wP/NfADVf1jEfkj4I+A/wH4LeCQ//k88K/97w2ABe2j/VvY/i3EdMkSxEXdAmSfrifkvR94\nxz3OuYmaBLNVaTyj2C0xsmc31X370Eo9F+A7n3WNqeQA7mKR1UkJSoL7lEOh5vJggGhAFAvX0Ebi\nY8VSVS8Dl/3f8yJyHNgN/DbwFb/bnwE/xonlbwN/rq627zURGRORnf55HmpUFJI2tncT279FTEIx\nVcn8jPYjn2Oo4/H/WUkw26E+vgnZuZPKjm0gXmDWsi6WKfvJABUBNViU96cXuHirj5iIj+yGfLdz\neyPt9yDPZVN2bq3y9J4GkVh3Z+bLXi/HzpD4RD5LEdkPPA/8EpgqCeAVYMr/vRu4UHrYtN82IJYi\n8m3g2wD79u37hMNeoyiknRvQmUbbN3yAxFfGrMZyDF5gjAp9U4M9TxLteILK6Libxoqus5zKYtqn\nCCoRCfD9I21+8G4biRobzdhZHsp5Y2mXzx9K2DVRYXzErzZKdiEu1aStl0PoU3DfYikio8D/A/x3\nqjpX7qCtqiryyRJjVPVPgD8BeOmllx6SQ1yxyS0kuYnYNoLxHdOzSPbKHlFFTbmi0RZMbR9RYzdW\nGoga5NMWFK0gSyd9+eJpCm2ts5DGxFpfpdGtf+44AbMP3FaZWUyZbSdsGamWSiuKJLN1cgh9au5L\nLEWkghPK/1NV/1+/+Wo2vRaRncA1v/0isLf08D1+2wZAwYul0Y6fqmTWUGkKs0KHV2bMWgSpbkOq\nOzG1cayfRUXr8ijPLj4gaonUBbXUB7ayAMRdLwJrZTq8WvvdY59yhliea6BeEEVY7KQstFJ04s6A\n4kbifqLhAvwpcFxV/7fSXX8N/IH/+w+Avypt/2c+Kv4KMLsR/JUOiyazaDqLEdc8Q0v/5/PiFUNy\nD2lU24mp70biJkgKmf9pXR3w4lUwM3tcgMdo0WwZybo+ceeP3OVnI+13j30o3c4+5mx/RJjvGBY6\n2X1S2v8hmRDeJ/djWX4J+K+A90Tk137b/wj8MfCXIvKHwDng9/x9fwP8A+AU0AL++VBHvJbRBE0W\n0WQRkzfxLfICV3w4+R9CVNmB1HYA1dK6PasyrCHgT9hS/upSy6i0W+A+WPpRFXajsNiLaHV9Spr3\nU0oulRvHwryfaPjPuPen8fW77K/Av/iU41qXaH8O+regNw+kgAvuqID1jXdXdkBufmW1jmnsIxrZ\nhVAvlq1Ypwd57i/XYingIthQvKf1+v7WBFqIYKerLHSUxBpiU8QFYSNJZUhNGyo2WUD7c2jSJqvE\nzgVSsknjyuG6ooNKHa1OuCl4KbF9Y02iAg9Kqkqrq3T7foMsvdRujCMpiOUQsekipPOQdu6YG2pJ\npFYWhagBlXEwtdLWAS9VIHBPFGGxZ2n30nxLxkbqER3EcoiobYO2wPYz77hvUMEQOg59ckQUEYtU\nRqAyBlTIXfnZwAKBuyAipR9Du2/pdPuUI+HOI79xwjxBLIeIaBexXe/8dpdc17E832NlB5TpYtxA\nKltAKoP3BQL3gwi9xNLpu4q0O+VxYxxMQSyHiCTzfn1wJY+dies3mfW1XGljLiVGK1sxlXFUfcBp\n3aUMBVYPl2vZ6kGrd2dmx0Y6ioJYDhGxbbAdBIv4FlZKuXnayl+BLRWIRzFRPRdsN9iNdJgHPi29\nBHp9A25FqdUezqoQxHKY2B5oj/Ia4a5gZ7UOLkGJ0GgEMVXvaip8lRvzkA88CP3E0E+XHDEb7IIb\nmv8OEVWLaoKRlLy7kK5WFDwjRqQGJs5Dl0UNxsY62AMPTj8RkmTweJE7Z+UPNcGyHCaagk1wCenZ\n2jirKJQiQARS9d2PBnKZNopfPvApUYQkVZL0zgvs+i1t+OQEy3KYqEU0wWh6R0e21TikXK10BKZS\nTJmyqTgQrpWBj8UvjpdawdqyKVku5d0YBLEcKoW5JkipNLzcpWVlRTNviFAe38Y6xgOfEtUiNDh4\n6GwUm9IRTIshUlx3vSiu8ix8sImHLrmd9+kJBD4a8WscDfgoN96RE8Ry6Lhpi6gha52xeofVoBWp\nxXqIqzaiwHpjqU1ZVs2NRRDLYeLThHRgQ37HaoyIsmBmQql+bq6rUYMZWF8IA5f8weYZGyscHnyW\nQ8WgkhU7FlfirC/gQEvqFUGcFaCp+50FeUKeZeATctdjZYMdQMGyHCamipoqVkAlpRDK1XNeik2Q\ntAWaIL76ojyKDXa8B4bCxpyRBLEcIiJVoIZKtGTSsjqeS1Ew9DG2hdpe6Y4sg0g26nEf+KT4DkQb\nmSCWw0RqYOpYjQGTV++s5iEmkqDpIpq4RVQEzUsxA4H7RQbEcmOKZhDLYWKqYBooWbVMJpirhGgu\nljbtUI5shqShwP2iKMaLpQ7e4VziqzWwFSaI5RDRqI6aOmgWN1ME4z2XruHuSh5YzlNqwS+iBokf\nU3nJqY1yqAcehMzbHhmIpHyhlSV7PfwEsRwiGo1ANAIa4WrDU38cGb8caVYvvnKIWOjdRrvXgG7e\n41oy8d6YM6rAJ0GgVkmpVVzQUkuX2dU4pleLIJZDREzdrXMTFZalXzFsad3hyqGCTVrYZBbRBPI+\nm4HA/VONLdV4Yx81QSyHSFRpInETqdRKSUMW8onvCrvGVdw4tIP0ZyBtM7iqYzArA/eBQi1WL5ZL\nj+WN03U/iOUQMXETqWwuLTlLsVqZrvxBlcXiDT3oXUV7tzdsqVrgQXH+9loM1YqXRykuuRtDJh1B\nLIeIRCMQjaHRKKrGW3aQL1+WTcdXbkSAEEkf6V0m7V5HpV9yz28cqyDw4Kgq1UioxT7LI+ukJbCR\njqEglkNEqWMq2zGVbWiWa4kA1jdqGf7HrXf9N1jJK6Ro6wxp6wzowkAzjXV/mA+0wLvX32v8J/+z\nlIuzhiYAapWRWoVmo4azKQsjYCMRasOHSgTxJoi3YLUK4tZZzrsPLdNa3SpFMofrjVF6vex29zrS\nveQi47VxUoTIP2hdCWZZHNVZ64qF8lIefseyx2FtvccsCyFLK/M+QEn9BXV1G+sOFi24T65RjWhU\nowGPtzvolqYRPbwEy3KoCFQ2o/EWMHWXFI7m03Fr7NCvxtn0XhDfFs54UfYC4lM7bHIT7U6Tdq+5\nqLhmgrOerIPCYsw+R0Vc85I84aBkXcuq24z3+NH8o8/X/tRMpNZeKk4kwkhVaFQUKR0vG0MiC4Jl\nOVQMUWULtjJGWmmgiRNKUZOfJMMufnQXdsl9pG6S1HcWo5/aWbGoLpL0LkJnmnj0SdBRxETr6Ih3\nJ2mWY6BYVDJb0ldLld9M5k8rr6axUkP9WLJ+p6Ck3iqO3AxEbLHPmhixUomhUYXIlKYia0vPV4Qg\nlkNEEExlDKlshWqTNFUi9U5xscty6CuCFSEVJVJFrHudXDzVTcpTI1xdvIZZuMy2zfNsqo6
uK9d8\nWSqzLc4OUyok1OliMCXLvZCatXZe+7AbTur7ICmiEalUSURcstkaGbSgNKqWRs1f6AXy9VLWy8Ez\nJIJYDhFFwGxB649AYxdJ+1TulQJdphNAEFUiSRFRrAiJxKhAZBU1EQvU+Vlngtduwqb0Ol+qXeHz\nu3dSX47hLBf+/HTWufEpUVBB+OazNZ7cHWGkcs+HrxHtAQrhd86ZyLU5VeXagvDOeeX4+ZS+FsK/\nmt1+VFO2jiZMbIoxJf92Fo/KL0kbQDiDWA6dCKlMQjyFpQGymMVYludiLIpRkNRNQ1NxE3FjU6wx\n3KDGsf4Y312c4mRvlPHbs+ydv84zSZd63Fxj09N7o0v+doIJiPL0vjpP7RPMOmkhZrPgVB5VdgfH\nsekOl2f6HE/tqkcTMoFWa2nWhU11JxXZ8ZIHFFdrgKtAEMuh4tTQVCcxlR2YaBOazvsG5ctj21ix\niAjGGvDT0EiVvtS5aOu83d/Ma+0JjvQ305WYfnuG6dnLXJ2/wfh4c50d7GWbrEiSikv3rYfT1xWc\nglE3E8hGPjefcHvB0l+GFLMHRdUyWo/YNFIpb3S/hXVzsR0Ga+dbeQjIoq+muoWoOoWpbPVpPfZT\nTMGz0JBPbVcQtRRRU/fEmd8SSWlLjRPpOD/o7OI7izv4dXeCjlRRI7SSNhdmL3D+1jmspO4V1PW4\ntP5H12CuX9FJsZgIFjFxwx0BnjVN0a++XLYws2C5tZiiZmXfR55BUOq35qxfSyTKaN0wUotYWuS4\nfj7v4RAsyyGjKBLXkeoUUp1Ce0cQ1Txq/aDPWjjWKR2jilHjouFORelEVd7tTvCL9lbe6I1xwY7Q\nlwrGJzxrBBfmLnHm9nn69mWM1H0gSH3K0dKFJ9YO5XGJ+FsK6++an1lmtrRFuLkItxbtgx8mn3pM\nPuFcrFvUDqVRhc0NqEalT3+95eYOiSCWQ0RUgAikDs1DSOMQOvcTsAl5bfgnNDGLKLA4K1UyD5ei\nYlBiBEhFuGTrvL24nb+e3810WqVlIqyJ/IFtULVYgav9WV67epQv3/4ch8YPYIicXeaTjMuZdGvl\npJA7zlAXT14zA3wA1E/IDTDTSjlzw3JrwYAxTjBXUDWz+gW34qdFiFAVJrYYtm/2zX8HH7FiY1sr\nfOwlWUTqIvIrEXlHRI6KyP/stz8qIr8UkVMi8n+JW4AGEan526f8/fuX9y2sJVy+n1JF6tsx9V2I\nreKE7sFmtVmyOT46mvUPzMQtFcMiNU4mm/lRe4rvtKY4bUdpmSoun88lElsxXlwhjSznWpd5/+ZZ\nFpM2Nu9rWQxyFfp+bBgKR4K35FS4djvhyqzFSuS+pxUVyuLLVrGIWudPtbC5GTPWjMKhwP3NX7rA\n11T1s8BzwLdE5BXgfwX+pao+BtwC/tDv/4fALb/9X/r9NgiZHBqojiH1PUi0ySVPywMu5aBZlY6z\nKi2CJUKswSDMa4Wj/c18r7WD77d28H6ymb6JUImwEjs/p2QRVyFWiCRlPpnn+MxpLs1fAVJfAVP2\npQWWC1XFYsnquRILl24lXJuzYFbbpeDj86oIKWNNw3gzRh7oUv9w8bHfjDoW/M2K/1Hga8D/7bf/\nGfCP/N+/7W/j7/+6bKhl4TJ/VA3qu9Dqdr+WuM1Kaj7Z0+VVKoqo9daIIZWYRRq81dnK9xa28Xfd\nSc7qFrrG+Sdza8FHW90tl5sYWcWIcmLmDO9f/5BWfwHxFTG5z3KNBXgeNtyl02KB2XbK9IxloZO5\nGlb3w1fvLmrEKVtHLFtHo1Uby1rivi5jIhKJyK+Ba8D3gQ+B26qa+F2mgd3+793ABQB//ywwMcxB\nr22y+GyMaexGG3tIter9T5/8BHB+JOeTFBWMCqmJuSF13u6P852FXbze2c5VO0LfRFh/shUr/ziZ\nNKq+XtxtMSpcmL/CkRsnOTd7iRQXGS+3ogjWxPJQ9v4pMD3TZ3omRamQZ1+u4Edf+MIVY93F2GIZ\nG7Vsayrjo3GYaXCfAR5VTYHnRGQM+PfAE5/2hUXk28C3Afbt2/dpn27NkDnt0YiovhvZ/BnswnvQ\nv+ybELizoGhrWYRwluasaX4ry2avMEedY73NvNYZ4+3uOOeSMayJQCyxtagYUnElltZbisaCUUvf\nWKwokXW+1YV0kVcv/5qR6gj1qMFjY4+46Xv+LqToW3wXwgl0L7zDpUhH9IGT7JZPqle4MZ/y1pku\nR89bDCOoTe8SzFr+8eYXeY2xYkhsi93bYM+EsKnuLr0b/fv+RA4SVb0N/Aj4AjAmIpnY7gEu+r8v\nAnsB/P1bgJt3ea4/UdWXVPWlycnJBxz+2iPvKgMoVWjswdR3gEZ+EnyvQ26ww2R2+Po4OCBc1hF+\n1Z3gewtT/Kw9xdl0M9YAYp3ViRNGF2RyZOeoC3QL+VITOPfY7f4c7904yZHrJ5jtzuZdZYJN+Wkp\nXRj94nXljqOCC6Ccu9bj9HXLrZY694kP7uiKSlPW0s9XfwEVYxlvRmwdrWS1Uhue+4mGT3qLEhFp\nAN8EjuNE83f8bn8A/JX/+6/9bfz9P9TBBnkPOeW3apDGfmjsR6n7VmpLah5d9MbFRfPzy30tKi4M\nkJiI6zLCTzrb+c7iFG90J7imTdL8WiXFCean4W6qXRJLwU/jy/0SFWssF+Yv8ubVY3xw6xxWrUtS\nv493F/goyn7HrAVbtskJ5q3FhJOX+5y9ltKzJs+7XK1MhCLbwrKlAZObYNuWGHkQX/tDyP1Mw3cC\nfyYiEe4s/ktV/U8icgz4CxH5X4C3gT/1+/8p8G9F5BQwA/yTZRj3GqWc0O3CKVHjEWzjIFoZh95i\nqQUX/oQobMdyjqPTUKETVbiqDd7tjvP9xR1MJyO0JcKKYMX4rkKDZ9fdz7PBaVSpJojFtM3Rm6fZ\nMbqDJyYOMho37zkVDDbGfaLFBSlLEXK2v/uGe1b48FqPk5f73JoDkShPBC/PKVYMKarUU5uybSxi\n+xbDlhGDYL29ubH5WLFU1XeB5++y/TTwubts7wC/O5TRrTtyj6Vbr1sFU92JjDyONPZie9MId1mr\nW12bNZfj5ixMUaFtRjiTjPJmd4zXW2OcSjaTmBhD4k8641/xQdq/OWsUtRijXO3c5I2rx/iNvS/y\n1MQBqpl1Ww42bKSkhk9J0SRDsZK5QMD5n5Vr8ynHLvY4ddXSTWuID84Vizas8Ged9UJVBVImNwtT\nmyNqEfj2QhueUMEzTMT6WExWuy1Y2YIZex67cIykfYYouQJaLDfhH+hF0rUeS8Vww1T5bmcXr7fG\nOdMb5bbW6cYNXNKkIdKUWBNs5p98wINZEIyClT4nZk/xH07+LYn9ezyz7RA1qXmLQllL1Txrn3K0\nu5h/iyhWLV1r+NnxFr840efKfBVM1S2P4ZfGMFkjjRX6wJ09GTk3kcLkJuXgpOHR7XVcv99gVUIQ\nyyGTFSJKflNUiKo70OZhdOQAdvYmprReTBHvNi
gRfapctVXeSjbxnc4U08kIfYkRiYg0gdJULp+y\nSfmZ7h/j3QaFBZTw7rX3mahtZnN1lANb9hIjuUGZpwCW327gLujAb/cpWxRLIsLl2wlHzqVcuR2D\nxOTHjAub59/JSg9ZfZXYri3C7jFhc8OAal5NtNG/7iCWQ6WwFLOJlEFRM4oZfQyaB9H5Y6C9XKLA\n1+WKYVGqnEubvN3Zwi87E5xJx0lFMMZNhSOS3P9lJZv0P9iBnAmfekvWkgIpVzvXeP3yu2ytjzMS\nN9nVnCwWOMgj5eHk+XjKQRFXdmoRFnpwYrrPh1eUblL1a3CnLs3Lu19WNhJeGqsIalN2jgm7xmMq\nkS3Nf8K3HcRyqBRBkSyPzh2IFeLGI/Sah6H+OrRuu+CMpL55hWGWBu8nTX7eHeft9gSX0i1YU8Wo\na8Lh/Jw2fx31vk/FJ5w/6Hi98GaO1ISE8wuX+dmFNxmtjlDZ/VmmGlvz+PnAYwdTBwMZAznlRZCn\nZyPOXGnz9oddZhYrIDFCCoiPOK+WqyNzG0UYEnaNCzvGq74BcPhyM4JYLgMDV+KsW0tlArPpKezo\nIWzrA5fKIxaViLY2eL27lZ93t/BOf5xrdhQ1BpG+e4osP1Jc2Vlu2ZUznx+YrA44s1Shoz1Ozp2h\nfr5CvVrl63tfoSox5ep2U36PgQGyybfmcRolQbgw0+ONUx2OXxSsVPzso/AHDnyaK/nRZseRWjY3\nlN1bK2weicmPrfA1A0Esh8od05XSn1ZHiDa/iN06TTrzC4y9SZsRzqZN3uiO8R8Xd3MtbaJiQBLU\n9FGqFCnqMChRw5kcZbaDajHhUlHm7QK/uvYO1zozjMQNnpt8gtFK0z8mq0L6uHSlDYqAVdecRKzQ\nTQ3vX23z/XcX+dVJmFmoIybzBS/5Hlfhg3Q+0ghDl4O7DI/vrRObwgO/agNbYwSxXCFUBI3GMZue\nQkce4fZcwulkhFe7m/lVfwuX7CiRuCh3IpBSI0qriKQuSrrM2Rv5miulCp6eJJxdmOb7Z3+GMYZn\nJh5nc6VZBCB0oCBzVRfWWisU9RfqnSWGszd6vPp+h3fPwa3FKsjqN6YYrBNxfQdGa8ruCWFr07f2\nQ32xQ5BKCGK5Yoi6E8c09jAz8lnenUn55YLwa7uJK1p3J5Cmru8kFYyNiNSivkxuZbuBu9CUiJLQ\n463r77iEeVWe9YJZNAYJ7n9YKj6FlT7bVV79oMMbH1quzjo/ZeFjXhufmiiQJkxtidg3EVGLMn/0\nKkXm1yhBLFcILz9oPMGNxud5R3r8snuR23EV1/PbkESZH9KAWtR0/WPjFYuQSr6AlktRMgg3Ozd4\n89p7pKnStwnPbnuc8eoWvz9F7XBIKypZ5obZruW96S6//KDHpVsxlggkxajJU89XCxEZEPh63Gff\ntoj9k7HLsSi7CPTu1VwbjSCWK4g7QerEo8+gzev0by8CHSJVFxPN64YtGMWiiMaoRvkzrBw+8q4R\nKsr1zm3evH6Enk3oJl2+vOdl6qbOoH0ZAPd53Ooo713o8uMji1y4GdG3MRifxyC6Yhe/e46xJJSq\nlokt8MiksHtrXMp8yCzk8A1DEMsVRXwu26GJJ/iNTpsb/S6vXzlKT7qoWIwvOUsFUIPRmKVu9pVB\nkbwCyQBVEMut/iyvXn6Ds7enme0s8OLOp9m7aYoqFVSifJRZDudAezcpUo3WjWuz5L8dWCtOs8V4\nU39J8ZmoCq3Ecvpaws9PdHn9VI9LM0JPqyDGW2iGVLIuoyv5Xly3fgCjxq+5kyJGGal2eebRCi8c\nqLF1ZKk/NQhmRqhjWiHcyeEm47WozmMTj/Lk1sfYVt+KppAJomKcaGpW1ePuW0mrshhr8drZtlQs\nFxev8N3TP+UHZ37O+zc/pJW2XPQ3W0Eyzy0sre6dt9xZnx4wV3+TvRvrpVJ9lyYLKfRTOHqxyw/e\nbfOL91Omb8b0tU7mqCiEduVF587jyHUYUttlx7hyYLuwcyzOBUHIxrmxBbJMsCxXiEJy3P8TI1v5\nzPbH+XD2Ire6M3S07RKT82YL+K7n8inWHH9QlgZtBgMSNrKcnDvDQn+RmfYsX9zzPE9OHGSyvhV8\nMr5PDfWPsiXbRPLfa52ytBTfgebbVV05ICos9ixnb/b47tsdjk4rN+YrqMTFkr3Z85T+X1Eki87j\nZT9FgZGKcnAq4tCOCpvqmVXpR7lupgArQxDLlcJHj7Nu6VWpcGBsH89ue5zp2fN8uHiWTCRFi5PT\ndSNaDcH8aHr0ON+6zNUEsMMAACAASURBVMKlLtfas1xeuMnXH/kcE/UxIh+wKkId7jc4AV0PQglL\nChZ9+zTN1mjHSUqKMtPqcOJKwhsnU946rcx2jZt2r6Gpa7lRu5VsPShhaizisamY3ROVNTLStUsQ\nyxWiXP4mKEYNE7Uxnt3+OOfmznF64TyJKU7CCHyzYCn8fmsEl1ViUFFu9mZ4+0aHG+1bGIEXpg6z\nZ9NO6lEjn8YNvPe1lTVzH/gLnJo85xCc1rTTNufmLvL25Yu8dTrm9PmdzHe3lS4GgxHn1bXU3FGl\npIUz2VoemzIc2lllSz3CBEvyIwliuUK4KpnsL3dQxmo4vHUfnQO/wXs3T3Nq/jxp7L1jVok1a2Cx\nasO+B+7Ei9QiYunZBT6YP8n/fvQaBy/s4ZnJJ/js9id4bGwP2/3UPFuV0pRTjdY4rr+jxarmQRlV\nSyvtcmH+Oj88+wveu3GEM7evMN/eTFx9imrtS5j+FKqN1R7+AIUDyCCkYC3bmylffLzGoZ213Ksa\n9PLeBLFcMfKVb1x4QNzJWJE4n47fbs9yI5kjjTK/0lqmFMjx5uLtZJZ3Zv7/9s41Ro7suu+/c6uq\nu2d63g9yniRn+N7lrvalfWjXq40etqMISgIoiAIj0QcFApJ8cJAPjoQAAQLki/MhtgMEcQQrgRMk\nkRPFjgUFgWNbEhInzkr70Eq73AeXS+7yPZwZ9rx6+lV18uHe6q6eGZJDLnemZ3l/xLCrq6urTlVX\n//vee849p8yltXnOlS7x0PBhnp9+gv09o+RMDpxg7h0EFeM+i4RKUuPq6hxvLbzHz+bP8n8uv8Ji\ndcHWhM+XaQQVTJzDVB5E6lNI3IPo7s/WAeuSSkuXKBAGDWbHhMNjEblQUfbK4Mju4cVyh8j6lkkT\n6roSE/1hL4/ve5Cry9dYm3+LchKT1vtOiz900o0sqUfYOaHSPO2C0KDBlfVr3KiU+GD5EqXqGieH\nZzg8NM3+nmG6pABEqGa6tJnxzJuPaGr745bTPzNrZGO4VdZdldku203euA97ltSJubB8mTML73N6\n8Sxv3TjL+eWL3GgsI8Y4w2M0XKDe8zImXMVU1girM9AYQpLI7kltOFZajnjzkTOxVnfRxGuesW5x\nDSUBVYwKDY3ZN5hwYipkpC9NzrLp1D0b8GK5w2SDMdIxvVDggZFZLq9c5Vp5nvfKl0gC5+lRzXxv\nOuRud
n6LloffYjNtK4mBCjXOr11m7XyFM6XzHF2Y5tjAFLODBzk4eJBIomxATWu/7iufmUDSihBw\nr6azS7I1ztMNt5ba9GfHGq2bXsu+2/48JSjVpMp8eZH3Fy/y0tXTvLP0AWdXLlKqlUBiAqQl+u6w\ncXSdxDQIgjKYVUzlOEFtMjOckjVA3HVsxbU2139I31BaxtjuwwplOn7aE8UcnzA8OJ0jMvZ4HXJn\ndTReLHcMad78LYlwKxT294zw6MQDXC7PUbq4wmJ9iWZ1AdXOiuTOKJlo61wErOceMKoYSbgez7Ow\nUOLs8gecXtjPbO80D489yMHBcfZ3D9OXKxJq4BxZ7RGeWX90WytMN0YMtl5r+zHKquKmwd/N70aV\nhiTMV5e5vHydC0tX+GDpEu8vX+T0/LuU4hXqJkYC5x3fqgyjxGiwSFyooMESQbCMEGHqA6CRszHB\n9i4Cl8atYesvJXZdOsJ4Nyg2lELUZM5ZEU3luMHkUMJDUzkOjeaQ5j/P7fBiuQtsimIUJVTDyaFZ\noiMhBYn40cX/x9XGjdY7OicKhVtFC6bfz3TqZiJKzdS4GpeYLy3z2vV3+MHFFxnrHWWqZ5yp3v1M\nFEc5NniQfcVRevI9hK7Eb1Y6065667F1xKa0SHvhtmx6MVHZsN6OQ5Yb69xYX+La6gJXyotcKc/z\n9uI5rqxcY668yGpjnSRQhBiRGJu0XogRRMzWQiMxSbCEBqtodAXCOYL1B5HKUUxjCFWT7oHU+WW9\n7aapkXfzUbciDRSVhhtLtvsGA0mD6VHlhVN5njnWTX9XiIpiNEGa91jH3GQdhxfLDkEQIkIO9U/x\n9OSjXFtf4NqVHzdzRnaUVt4CBRIDqLhJgEqgALEVuhBKyRKLN5Y4s3SBvlwfY8URjvRPsb9rlJGu\nIYa6Bhjs7mO8d4Ri1EWOoClKIqkoNLNv0iaY7v/EKbZCUyhrxKzVKyxVVymtL1NaX+bS6jWuVxaZ\nKy8yt77ItfIC8+UFlAZGgDA9SqtOji3rIRi1xd62HjsN7LUIVom73kDNGoFZh8pJpD7i5vvXQY0r\nJWHn4DcF/06LhzeviQtXEucgVGmuH+iKOTUdcOpgxFCPIWhdrj1zf+0mXiw7BbE3dHdY4NjIYZ6u\n3ODni2eYr5RAjCs7sDdo9tLVBqq4EkKkc+MBJBTq1JivL7CwsMDZxXMM5nsZ6R5ksNDPQFcf+7tG\n6It66MkV6cl10xMV6c0VyYUR+TAiMgGhBATGtooSTagnMbWkQblWpRrXqTRqrDfKrNXXKFWWWa2X\nWa6XKdVWKFWWubxynVJlidV6mYZJbLKLUNMy2lYMJa1ThP2cVAiTzLne9CrYSIEkWEEL74GpoMEq\nwfqDaHXG/aq0WtBtAw53pVzZ1qGgmrhhEkW0zpF9CY8ezDE1FGI2jYenEwY8N8OLZUeQDmbaL+lg\nvp9H9j/AqaGjvHztNKvx+p65iwXFpAmLNnTp0g60LfmbtLrDAVSlwtX6OtduXMUkkDM5BgoDdOd6\n6I66KeaL9ITd9IZFckFIZEIiY2zCZLFVCRNVYk2oxzGVRo163KAa11hLyqzWV1mqLlOuVyjXy6zX\n12loTBIaK4ARrjtKq1UntjRE05EltpUaKASJi+u2p7QFzqmktsaRBKuQP4sGK6hZI9Q8Uh9Bkrxt\nXTYdP5nQqjv+zLNj2+ImNSQYbVCMajwyk+PEZEQxyrRa0+qN4kcub4cXy47CtkYCYLI4ynPTn2Sp\nusbpG2eoSt3Vlk63TNshNx8/3BXaxs1cRsy2rmorX6aVmkw/UG1cYxwKFRKuVBfRyqLtViYQIORN\nRBgEBEFIIAYjxs2/tt3POEmIk5hao06cNOxzSYiNIsbYbryo9aOk8qBZ29IOqa2yKK6JabIeJdIM\nPmlT7BZXX0BVUYnRoIrKJZQGknRhKscwtQNIow9QRBLIZJpqHW9jX3+L47mWYuv1NB9pg8jUODCq\nPDyTY6Q3cBNR7b0kze5+x9xBHYsXy47DdshyEvD8gU/Smyvy38/+kFfnTrOWVF24TIKRVkKHTmoR\nqGwIKUp7lc0QQLUJQqzrHCGwi9oSqPRt4pI/iNj+sIoNSXJDoJn9b3EFUscygLrxuaxNslGQWka2\nTS91OmUy9qfjsuk53czRZcdL02a2NSqRBI2uo70/xBTeIVh/kKByElMbQ5ICkil3bMcEbOvbXlHj\nWqGbD6rOfm2GJymGhJxUOT4Jn3+sm2MTXUTpfpq9mc66fzoZL5YdQ6srDoAqxbDAgyNHWK2tsV6t\n8MrCWwhJaxvIZCrqjBu+lVWItuG3rAC1Xsu0NyWNN9wwNrvpy7zhqy2bFhy6YZuNQwLaPG5rnWul\nbxoe3jw/vyWUtxu1zHSL05QiEpMEy6ipg1mHYAk1DxKsn0RJA9izUQA21EhvN1FUW24uAE3qjA0r\nj8/mOTVZIEI2zU+XtskBnlvhxbLTyHwJA4TBQi+P7j/JeqPKB6uXma8uuLEycWOcnXer386Sm71+\n6/fd6fnd+d62fsfWvu7t2rBBmtxjgpoEdJ04dxHMKmpWES0itXEk6XJinLjmqQv9cQ4j2cJTnnr8\n0/tBgZFe5ZHZHI/NdjHWF5E6kjaa1Dl3Tmezlybq3kfY29cghBjGiqM8Of4Qnxg5xkDU48bAsonC\n/O2+11DsuGcideLwOnHX6zSKLxIX3iEJSqQhRKKBnV+uih172DxvCdLhDzvma8PQ6pw6IDx1rIuD\nIxE52e1CFnsf37LsEKRtSbFSaYOFBZjs2c8Lh56mHFf56eJbrMSrdts0UYP/Juwh0jFDm608EUVl\nGe36KcasEJg1qB7G1AeRpAvSqAFNu/Jbf9hWRgXRBuMDMb9wosCx8TyFyPiu9j3Ai2UH0D7SlF1j\nmt2kEOGZqScY7h5m4vz/5c8vvcKVtTkabnac/xLsJZwXTBKa45Hi5pWHSyT58yTVGYL1BwjXHrGC\nqREZj9UW+1QMMYGpcWQs4fMPF3j6SIF8lCZk8W6cD4sXy05DyHSvLWkChC4ijg0cQA8kaAx/fvkV\nLtautrZthha5LldblS1P52DzgNpPKWhFg0uCUiWOrtsyyKaMibsx1RkkHmy+VzfOH3ce+4AG00MJ\nz5zI8chsjlzo4gs0c0/5e+Gu8WLZgWxsaaZ+URFDd9DFsaEZYrVB2Msf/IiVpAzGJdhVbUacNGeF\n6JZz8jy7hpMubTlu0GzWIUjMGo38e9CTIzDrBNVjSNyPaECiQXNGVDPISGMmBxOeOZrjqcNdjPdH\n2bgE//HfA7xYdghbC2T7i+qCr4tBgePDB1ESPli+yNuls6wm69ZTimZiGnfMfM8dYAPo3ZMtJ2Un\ntpUZ1Gl0nbGtzGCZoHoU6uNInLPpKV24lSpMDiQ8eTTkU8cKHBiKyDVDrnz3+17hxbKDuNVNnXo7\nRe1skh7Jc2LoEJ+bfQ5zzvBW6T2W4xU3K8O46opJWyiSp1PITC
2UpLnO4rzeEtt2Y7AMhXfBLNuU\nb5UHiconECIShZiEYgRPHQ149mSOmX058sGG4/iP/p7gxbLjyUxjywRgBxrQZ4r80sxzHB44wP++\n+BP+7NJLnC9fpkEDk6Qxd4JPu7WHSOMq3TimSkwclEnyF4ijOUz+bTCfJqgcJagPMNUX8tSRkL/6\ndBcjvaEbzWwvlOa5N3ix3DO0/JkqAsbOie7GcHzgAJExRFGe/3XpJ5y58a7tubuKhF4q9xianY7Y\nsF1tSdCgSpK/QqI/Jh+sMhGd4ukDkzx/osBIT+DGKF0mI/8Dec/ZtliKSAC8BFxS1S+KyAzwHWAY\neBn4m6paE5E88O+Ax4EF4K+r6vl7bvl9RCtGLk1OAbZGj+2W50zEzMA0QZijJyzw++srXC7P05CN\ne/B0PAI2+DwVzFY33QadJ4T5D5jo7+FT+w/zCwfyHBrNYZN9iEsU4oXyo+BOZvD8KvBm5vmvA7+h\nqkeAG8DX3PqvATfc+t9w23nukswscJt/UDPzp0VTtzeRhBzqmeDTE4/zwvTTHOybIjIBepMZH57O\nRomxopmmtAOICZKEowP7+ezRGV44McLsWEQUqUvH5ifkfZRs6+qKyBTwl4Dfcc8F+AzwXbfJ7wJ/\nxS3/Zfcc9/pnxf/UfQhsbZq08mEaQ2nTKjQ3wQCRGMaL+/jM7LN8buZZTg7M0GMKkLjcirSKVtmE\nO2neosRlx2mt2ZCRzHMPSVNjZK+4HV9OnxkEgwFMEpLW5YkSmO4e5pdnfoG/cOApDg9OkQ9DAoNN\n5us/sY+U7XbDfxP4NaDXPR8GSqracM8vApNueRK4AKCqDRFZctvPZ3coIl8Hvg5w4MCBu7X/vqAl\nkdl5GO2P6e9RIobD/QfoCvIM5Xv4swsvcXrxLAu1ZdRoMzNZMzEDavMbChv26/lIyTq/aSW/UDGo\nSzAsJHasUgMK5DjYM8rTE5/g+emnGCsOEzYzU3qP905wW7EUkS8Cc6r6soi8cK8OrKrfAr4F8MQT\nT/ifxJvQDEiHLb3assGBY1TJEzLTM8lEcR8PjZ7klaun+YMzf8KFtUtUtOJ2kyBiZ48k0MxqblQJ\nE5ezUVrrPfcOUSVQbWaPSsQ0P1slQSRGXUfcaMjBrv18ct8pnp1+hAdGjzCSH2z+aPpPZ+fYTsvy\nWeBLIvIFoAD0Ab8FDIhI6FqXU8Alt/0lYBq4KCIh0I919Hh2mJxEHOydpBgVqTWq/PjKa7xdOseN\nhi2za7vehgRDWhfREG/IaOS596iLr7RZgtQ5c4wmmLTYmIBoxAMDR3lm7GGemjjFocFpimHXbht/\n33LbMUtV/aaqTqnqIeArwA9U9VeAHwJfdpt9FfhDt/w99xz3+g/UB33tKCLS/Asx7CsM8plDT/Ol\no5/j0xNPMtM1TT6xSWYhRomxtVps+t26ERrG1o7x3HtUhIa42jyo+9GKgQZCgiRCF0WO9s3wpcPP\n84szz3BieJbesJsgnUvu2XE+TJzlPwS+IyL/FHgV+LZb/23g34vIu8AiVmA9u4AtyWDbjKPdozwx\n0cto1xAT3aP8ZO7nvLb4JjXqYBJQJVA746MhPnv2R4/gqmWQkBCniX0Tw3A0zLHBWT45eYpnpx9l\nOD/o3D1pAJFd8uwsdySWqvoj4Edu+T3gyS22qQB/7R7Y5rkLsoEHmvGwCgG9YTcnhw8zWOhjf/8o\n5qzh3dIH3Kgvk5gEJEYSQ5AY36r8SBGCRDCqzoljR6WLWmS6d4yHR0/yxPgpjo8cYjA/iDQrCPkE\nvruJn8HzMcZGZIaAErpRydAYpnr3M9G7j+ODs7w+d4ZXr7/J6wvvcGX9GjVTs04Gl4RBbYxRxlnu\n2jWaDZbGpZZrT2H8sWeLYmctz7Ydh5TUq+0mFKB2eEQloeEKjOUkz3T3GL80/RyPjZ1kdnCK3qjb\nBpmn8/ztQdz/98XV7Ti8WH6MEdedbpWiTduY9u9g/yT9hT4mBvZzoH+c1+ff4ZVrb7Aal9Gg9eVv\nNVYzSb8k3V9r/f3EzWvEZetTuvRrZMqApF5vhUADRgpDHB08xEMjx/nMgacYL46Ql3BTiJgV2o/w\nhDy3xYvlx5x0pKst9EjTr7KhP9/LqdFjjBdHOdI/TX9Y5M3Fc1xav0Y5qWCMfUMiWX0UklSIbZPy\nvm3rNPVrqx8UTVyL26C0fnw0UYZzA8z2TXFq+Ain9h3j8OBB9nWPEGJcriCXSMPtq00679eLvct4\nsfwY0x5nLu0vKG7qpBBIyL7uYYYKvfTlujkyf5Y3Smd5Z/Ecc+V51rUGrmWUVnNJXCkEo67Dqa5O\njMjGo3+MaSXfRdO527Zykg3Iit3s7sDOylEoSMD+niGemXicU0NHODF8iNHiMJHJu2TADqeQbVfx\nfrikHYwXy/sW+0U3mmqnISd5Hth3jMn+cY4tH+L0/LucufE+r1x9m8XaEg2po2JF0QAkZITy/uwl\ntlp99npqmlotbY0rkChdkmN/9zBHBqc5OTTDp6aeYLw4SiHI2znd6kqRuaakts3W8nQCXizvY2JR\nRGzIkK3fYzAYhvNDDI0M8InRk5SqK7x65TTvLL7PmaX3Obd8kYXKInVqqCTEkqbbFIwGrnV0f/TL\nFbFtR2nN9A4UTCIkKoQSMZQf4InRBzg8cICjwzMc6p9kuDBAJGFzdNO6gpSEhMTg5oXfBxdwj+HF\n8j6mOT05TZ0oaWUXwAnnUH6Q56Yf5/jwQc6VLvJu6QPOLV3i59ffYaFyg6rUSdxdZB1J90f7Umid\nbyyQuJIf2hC6TTdjvcMc7Jtgtm+KJ8c+wWT/OH2FPgICsrP801Zp1vnj6Uy8WN7HmGYyDZrKKc1S\ngK3sRF0mz4GeccZ7RjgxMsPFpWvM9E5xfvkSH6xe5XJ5jpXGmi1jIerUd2vRvNU0yva1N3v/zQWl\nLTfFNo+48SjbsSEl9XWTWM92T1jk+OgM073jHOobZ3ZgkgP94+zvHkXUkLguehqUlRaXsGOdree+\n+92ZeLG8T2lmXceNWaZiuaW4CEhADsNY1yj7ukaY7p/g4tJV3l+6zPurV7iweoW3F95jqbpCQxvN\n3LUJzUFR+59sPkLWlmxyOMk8bmeu+s2kTTL73CyQG+QxY2e6QtOBRNLWpI2f1AS6wy6GuweZ6Blj\numeMh4aOcWBggrHeEXqjIkHGs50K5KajSmufXio7Fy+W9yupR7wZ6tISk80e2EyYEDa74mjXMMNd\ngxwbneF6eZELS1d4o3+ay6tzXCvPM7e2QKmywho1CGyLVVIPvOBUKePuVTJTnjcIeeb/rSSxJWXQ\ntsNNJ9w6y6Ymuv2mPxhNWW46rdyjCBorJhEKJs9goZfZgUnGiqNMFPcx1TPOdP8E033jRBJixDTF\nL/0BaIYEbbJ9i2B+r5gdhxfL+xnJPmwILdrctmyimmDnBhn6TJG+YpGZ4iTPTD7CamOd+fINrq7O\nc31tkbNLF5grL7JQv
cFiuUSpssx6bNPEiUmly5Z+zUpimt44SWfC4LzwmjRFNSvvm1qemQFZlUyb\nzoU42RR1mazzLr4RbJnaJIEAQ3dUYCDfy1TfOCOFQfYVhthXHGJf9zCHh6YZyPeQkxyB2oS9kgaj\nagJiMtd2G0MPXiA7Gi+WnruiFRGYtjoDQgL6oxx9/X3M9k9TS+pcWb7K3OoC1ys3uL5+g+vVEhfX\n5liuLLNSWWWlskq5sU4dcK7kNKLbxYHaVGappiW0GqXZFmj2MX3SbLFpkrFXXcxoQqJqk3YmhkAN\neclRzHczWhymp9BLf66H4UIfo/l+DvaOM9o9xEhxiL58L5GJkOaUxmxAZMYKzXTnPXseL5aeu0Dc\nrBTaGk2t9p1tZXWbkNmBgxwamKauDdZqZRbWS1xcuUJpfYVSdYVSdZVSfZX5SonV+hpr9TXW6+tU\nqhUq9QqJumzhYuMWbeHDjQKUMSLtX6udd23rFlkNDlQITEgURhSiAl35Ar35fophkd6oSE/UTX+u\nyFhxmP58LwP5Pka6+xnq6qMnKrqAHtM8U1umg1ZwZNMWtlj27HW8WHruinavdDqTJTv6mfp1bQhS\nTkJy+TyD+X4OD0xR1wbleoVSZZVSdZX58g1W6+us1ddZq61TrpVZra9RSar2r1GlmtSoJXWSJCHW\nxGZV0lYAt6GVxzMwhsAEhEFAzkQUTJ5iUKRgchSCPF25AsVcN335XopRN71RN/35Iv2FIsOFfiIi\new62XOKGuEdpnu9GObQzmLyT5uOIF0vPXbDlKGEbm2VTSUutCUJeQvK5PIO5fkBJsHUo60mD9XqF\ntWqZpeoq60mNSmzFshbXqCd14iSxLU6lKZhkhNI0xdIQupZk3uTpCbopBDm6ogI9+S66oy4KJmpz\nr4iAaMbts+UJZrzzkvXcZ0OmvFx+3PBi6bkLtvY042bzZN0ZqU/ZJiJuCVvb+2iNgeZMSDGfZyTf\nv8UxbmZNu/f81lvbLdsc1Ru3bhNA6723mtzyZptMuzIbjNTub/eC+XHCi6XnztmywbVF/KCLsWzr\nlGaDOm+z8zvtzG41krmVrt9s1rWSDoemMY8ZwcucR5q6siXMrRalT8/78cWLpeeOuaWIbYiF2SKC\ncBsNrg8nkrd78WbhOpvXb2n9TQ641Y+F5+PEbQuWeTwej8eLpcfj8WyLjumG70a1XPEBwx6PZ5v4\nlqXH4/Fsg45oWcZxTKlU2rHjBUFAPp8nn8/v2DE9Hs/epiPEstFoNMXS2ApZJEnSfF1EMMagqm3r\nb4c0K+m1d/FzuRwi4sXS4/Fsm44QS1WlUqkQhiH5fJ4gCKhWq9TrdQCiKCIIbKH5JEloNBpt790o\nsM0pb+496faq2hTO7D48Ho/ndnSEWIIVxEKhQBRFzXWqShAETbEUEaIootFoEIZhU/BSsQQrmGEY\nNkUx3S4IAiqVCuvr696x4/F47piOEcu0NQitbnO6TkSarUJjDGEYksvlAJqtR7DCWK/XCcOw2QIN\nw7A1Zzgjql4wPR7PndAxYhnHMfV6HWMMxhjiOCaOY5IkwRjTbF2m6xqNRnM5Fb4kSYjjuG18s1ar\nbep6Z7vjHo/Hsx06TizTrnetViOOY4BmyxJagpeKaRzHbV33MAwxxlCtVqlWq00hNcbQaDS8SHo8\nnruiY8QSrBBu5XipVqvUajXAtgpFhGq12ny9Xq83u9q5XI4kSajX622tSo/H4/kwSCeIiYisAG/v\nth13wQgwv9tG3CHe5p1jL9q9F22GD2f3QVUdvd1GndKyfFtVn9htI+4UEXlpr9ntbd459qLde9Fm\n2Bm7/XRHj8fj2QZeLD0ej2cbdIpYfmu3DbhL9qLd3uadYy/avRdthh2wuyMcPB6Px9PpdErL0uPx\neDqaXRdLEfllEXlbRN4VkW/stj0pIvJvRGRORF7PrBsSkT8WkTPucdCtFxH5F+4cfiYij+2SzdMi\n8kMROS0ib4jIr+4Ruwsi8mMRec3Z/U/c+hkRedHZ93siknPr8+75u+71Q7tht7MlEJFXReT7e8jm\n8yLycxH5qYi85NZ1+j0yICLfFZG3RORNEXlmx21Op/7txh8QAGeBWSAHvAY8sJs2ZWx7HngMeD2z\n7p8B33DL3wB+3S1/Afgf2HpVTwMv7pLN48BjbrkXeAd4YA/YLUCPW46AF509/xn4ilv/28Dfcct/\nF/htt/wV4Pd28T75B8B/BL7vnu8Fm88DIxvWdfo98rvA33bLOWBgp23elQ8rcwGeAf4o8/ybwDd3\n06YN9h3aIJZvA+NueRwbHwrwr4G/sdV2u2z/HwKf30t2A93AK8BT2CDjcOO9AvwR8IxbDt12sgu2\nTgF/CnwG+L77cna0ze74W4llx94jQD9wbuP12mmbd7sbPglcyDy/6NZ1KvtV9Ypbvgrsd8sddx6u\nm/cotpXW8Xa77uxPgTngj7E9jpKqpvNfs7Y17XavLwHDO2sxAL8J/BqQZqQepvNtBlvk/H+KyMsi\n8nW3rpPvkRngOvBv3ZDH74hIkR22ebfFcs+i9ierI0MJRKQH+K/A31fV5exrnWq3qsaq+gi2tfYk\ncGKXTbolIvJFYE5VX95tW+6C51T1MeAvAn9PRJ7PvtiB90iIHRL7V6r6KLCG7XY32Qmbd1ssLwHT\nmedTbl2nck1ExgHc45xb3zHnISIRVij/g6r+vlvd8XanqGoJ+CG2CzsgIumU3KxtTbvd6/3Awg6b\n+izwJRE5D3wH2xX/LTrbZgBU9ZJ7nAP+APvj1Mn3yEXgoqq+6J5/FyueO2rzbovlT4CjzoOYww58\nf2+XbboV3wO+zWffEQAAATdJREFU6pa/ih0TTNf/LeeFexpYynQPdgwREeDbwJuq+s8zL3W63aMi\nMuCWu7DjrG9iRfPLbrONdqfn82XgB65lsWOo6jdVdUpVD2Hv2x+o6q/QwTYDiEhRRHrTZeAXgdfp\n4HtEVa8CF0TkuFv1WeD0jtu8GwPMGwZpv4D12p4F/tFu25Ox6z8BV4A69pfta9gxpj8FzgB/Agy5\nbQX4l+4cfg48sUs2P4ftivwM+Kn7+8IesPth4FVn9+vAP3brZ4EfA+8C/wXIu/UF9/xd9/rsLt8r\nL9Dyhne0zc6+19zfG+l3bg/cI48AL7l75L8Bgztts5/B4/F4PNtgt7vhHo/HsyfwYunxeDzbwIul\nx+PxbAMvlh6Px7MNvFh6PB7PNvBi6fF4PNvAi6XH4/FsAy+WHo/Hsw3+P5cgPlrAb514AAAAAElF\nTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Mp6cU7I0-r2h", - "colab_type": "text" - }, - "source": [ - "## Rotate\n", - "This operation rotates the given image by the angle (in radians) input by the user. " - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "9kxUES9sM8Jl", - "colab_type": "code", - "outputId": "f79b075f-a204-45f3-c5fe-e80cd6ae20ee", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 269 - } - }, - "source": [ - "rotate = tfa.image.rotate(google_img, np.pi/4)\n", - "_ = plt.imshow(rotate)" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXucZGlZ5/l93vc9JyLyUre+0VY3\n0CB3Re1GYWFUVGBoUWAdXGXGEXWUGWVGHEdXUGdEZgcb5aO7+tnVxc+osJ+VcRkZUAQBERWUbrlI\nI01r3+iG5tLdVHdV3iLOOe/7PPvHeyIyKquqK6sqMyMiK779yc7IqMiME3He+J3nfa5iZsyZM2fO\nnIfHTfoA5syZM2cWmIvlnDlz5myDuVjOmTNnzjaYi+WcOXPmbIO5WM6ZM2fONpiL5Zw5c+Zsg10R\nSxF5voj8o4jcISKv2o3nmDNnzpy9RHY6z1JEPHAb8FzgXuAjwEvN7NM7+kRz5syZs4fshmX5DcAd\nZnaXmdXAfwNetAvPM2fOnDl7RtiFv3kU+NzYz/cCT3+4XxCReRnRFHDddddN+hDmzBgf+9jHJn0I\nO8GXzeyysz1oN8RyW4jIy4GXT+r555zMvOx1zvkiIpM+hAvlnu08aDfE8vPA1WM/X9XedxJm9kbg\njTC3LCfNXCjnXAhmth8E86zshs/yI8DjROQaESmB7wX+aBeeZ84OMBfKOTvBxbCOdtyyNLMoIv8W\neA/ggd8xs1t2+nnmXDgXwwKfs3fsdwtzx1OHzusg5tvwPWcazvvZMGD/fvT2LzMomB8zs6ed7UHz\nCp6LkFkQyiGzc6RzhszS+joX5mJ5kTFLC1nar7lizh6ztM62y1wsLyJmZgHbSd+yYs7Ioc/ZZGbW\n2zaZi+VFwsws3NZRaYAyF8xZZ2bW3TaYi+VFwEQXrDXbelhE842RUCra3mvtV96Ta/s1Z1bYL4I5\nF8t9ziQXapa6gnS2BxqMC6C2tx0O18rm8GHWiqXNBXOm2A+CORfLfcxkLUrQdnnlo3h4C9MTwPJj\nHQ7B8CiqDU1dAW3AxwJooGa+K581Zl0w52K5T5nowmz3zoqCRYSKSKQinvLQbHVGhIiJtnajQl1D\nMrx4uiGgCpiS2hQ+j0MSc8WcMWZZMCfWSGPO7jHZrTcEybcKDETw1gFRwGV19JAVNWEUmAQUxaHZ\nqrT2sWYwWAcHftCHbonrLACdbLV65mI5g8xqpc9cLPcZOy6UyljC41mem0g2KaUVRI+JyzppDqka\n6NUQHwJ3GZx4kCBd7nv96/j033yAS6/9WvRvb6TsOO4bCE9c6OFswEOxYMEJYfkQV/7+75GWHkED\nlArJgbcGpNjZ1z1nV5lFwZyL5T5iVyxKhSr0KekgD+e1MYXWQjQ/fFyF9lfwYRFCjb/5I/Dkx3Pn\n//FLpA9+irsfup9rv+IrCP3EVwv4T32S2CtRTVx+5BLY2KDxJcseDi4uEDcGUCyg5uhIPrbkwM+F\nciaZNcGci+U+YaeFcvjXxEc6BPK2mNNamKkB5wAPZXR5y/3gMfxShL/8EP/4zj/jS5/4KJeY49Eu\ncHCpx2IlXNE7TF31UU2YOBoVREpKZ6T+BuYUE8V5eGj9GAtVB2KDdIxgAgrljr7qOXvNLAnmXCz3\nAbshlA1KqUMLcdxya/2JLQnwBVCtQiOwXnHz//Ic7jnesNCBJy0uc4k5rugW9AaK9jxdb1S+Rh00\nlhCp8c5T4rO/0gLJG+oUp1AoaK2syzpHRIk2IIqjWxbMY5Szz6wI5lwsZ5yd33rneLQnZCtS3chn\naVrRT30W/KH23xp8fx02VuFDH+SvXvc6rjzU5RHBuOISz1odKWslaqReEDYWld5Sh8H6CtFFJHi8\nOryViBNqdXhxCOAsx3cKVTweCOiBI8AS3dQhBZgL5f5hFgRzLpYzzK4IpSkiASNvgcUbgyiUjeG6\nHRYMaGq48y7e/QMv4ZGdDpeWDXWZePzBg3SSQ9XRGCyKUeLR4Omb5h38Q+t0ARMPtWMoeNFBCgkM\nfJtv7k0JCl6Ehd4SxwoH9RppYTHHj+bsK6ZdMOdiOaPsjkWZSFJgKIKSu4wWEMA5geoB+OuP896f\n+nG+crnHdZd0kZV1iAUFy3SiJ6YK60Z6FMSYqL3gzBHUUaS8pW68EsWBOMQgicObUibHsLNpcjml\nPToIztE0DYupgINX4C2CbArtnP3DNAvmXCxnkF2JelsWHxPwKJJaMRKlW21w47/5QfqfuZMndXpc\nd+lhmqaBtYg0HZpeSXKKtwYrGiwZiYS6bP+pKEkc2kbJ10qALJ7DkkZgJJTOHGY50o1TBloTipJq\nbQNWHoDlJUwc0Jk3B96HTKtgzsVyxti1qHd7I0iTKxPFQdPn5p//CQ597GYe6Wu63pFSgwLee/Ae\nLcFcxAHeHAMnBPMkO9nqC5oFswoQUhblMjmSnFrjraKI5eh7av2lVd1HFpah48H1SOQt+nZzQOfM\nFtMomHOxnCF2Y+sdDQpxgIJroAb6fVhb5bZ//jwWO0t0UAocSbIAqiqqSggBsSx+3tyoFFHs9Ntj\nbzpSZ2+KPexnQfFtfXkpiVAEym45Vv3DmNJf0JswZ0qZNsGci+WMsONCaWASCRJITcRrH+64ndt+\n9Ic5uLiIa9ZYZAGvJd6nUecfAOcczm0KopgbJRQV8ex+RAcj4X04vDpYLKnrmqSKkwKkzfmELJIu\nMiDRmW/J9yXTJJhzsZwBdsdHCZoUbw4f+/
zxdz2Pr1xf57AVLGCsNYLgcSS22z9yu03Ttvs4Z6Ab\nA5rYp+iU2EMncmJnaNu2aQBzFH4/GZeK4fbR67lwpkUw5+HEKWd3mmIo5hp8fwDHv8wd138918bI\nI73DgiN6oyg9RaBtarH3OHM0Pm/VQ1lkazb47MeMEYdvG1y6nBO6jxCaeb/OLUxDt6L9tcr2Gbvh\no8x/MSFNnw//9E/y2H/8KMtlD4nQNwddpV8NKHRymYzOHCo5uCNo/rkR1hYCB53iXPtKguwjk1JH\nGQmIawWTh6/Hv8iYtIU5PxMXEYZDtI8cf5B7fvpnecLtn8CHRNCanjmci5AKgk1QKNvvyWnb1k1Z\nKjrEuiYeXwPpIBrxVPtIKCGhmFSjGRpGQRbQSR/ZdDFJC3NuWU4hu7IgBhWy8WVuefELOZqE5cIz\n6IBYB7EuyUe8LuBtM41oLzGB6JWQ2tlkoogprkloalheOsiXOyWsP0g6dHR/VfAYeAvZSSsRLCDk\nIJiTsfOxjy4OF8KkLMy5ZTll7IZQJoAP/wV3vvQlXBmEplSqQlBJOQndaHMWdWI+SjFww4ygViBM\nwIkQNVHXNRprkN5ZBlTMDgaoRnLaFvm7pNE/OhwNDSY6F8otTMLCnFuWU8TON+6NoMYdP/x9HLz3\ncyw6paKG0CXnMTpCs4iTirbj2UTx6rJmmGPoMtUAqTEgcTABRaCr/VZcepM61B1BTfESxt77DhaP\nEwKgnXwBEahxCH3KGX+9O81eW5hzy3JK2HGhbBrY2OBDz3smh++9B9c0CBHxeYvXSYpPHZBqqobL\nDo/Dq8uiiSMFRx0jG5LAKnAFkc7MuvMMSKY5MUtgAKxqLpxac4dotEalDwZBfduzs0dNNY+Sb2Ev\nLcy5WO5HBn1YOcHNL/4WnuiV1ERSoYgZ3hy9xhF9mtiW+2yoKK51DTjL9eniEpdLCfUw/d0hEc4+\nZ3f6UBgJZU3+esF/uo3rf/bvMQdrbpkkQi1NjownpQA8HdIo73U6z90k2CvBnG/DJ8yOn+j+Ou/4\nJ9fy1Zf1uNQCSQvK0kjUiHYxhI2yoowdVGBaP3RZMB0ntOZIt6AJXaqNPp1uJ1ucorl+3cEGfRZm\nZIs6blGuAr/2prt4760VvcNX4VLge1/zRb782Vv4/pc+h3/3nIoGxXmHb90mSEFEc7MTYG7vZPZi\nSz5/pyfIjgvl+gq883/w9UcPItHhQoeOE9RqREsMiKGiExml5UwzzkDw1HWNiFAUBYw+EDnoYdLQ\nYTZm8GwVyl9981285x+yUIoKMRkbZZfDj/sG3vqX9/MvfvmzJAPTRNSESc6TDepIOOwcqqsuBnbb\nwpyL5YQ41xN71kevH2f19a/lvje/kSIVeEo6TaJhgIlhosRQU0SHiZuJj5gKLHqBGPHasOYacImU\najyuDR4XU1/Bs9VHWZOF8n2frugdPApJMQ1IESkIqHk6i0vcV13Gn96cMPEE5xEaUnvmPA6haH2Y\ns3A294bdFMy5WE6A8zmhwqZgqm75cKw8yG2v/DdsfOTDNP0B1UafMjYkVwOGmEdFR00uZuOjlcsd\nVRSKDnWTiGUHmoQP2UrOjTQmfJhnwWij3mNCWQHvvbWie+goooJpgNAw9MVCu60sSt7w+5/iV97+\neSoc1ngCFeZAURIgBOY+zJPZLcGc8qU2Z8j46XfObS6IB+/nEz/0z1n6zG3EusJjBAEfWo+WCc4U\nr9NtfY3jzGGieWAZirfsr+u2PjvSBtgA88xE/uFWobz+VbfSO3gUUcPUIQWY5Y7y4343M6Nz5Am8\n/xM1L/mFD1EXAVKBtOJLK5jYXDC3shuCORfLPWbbJ9FOc9MMS7kEThBSSnz4pS/m6PH7KZwnYBAb\n/Kh9mkPFITYbA2OH/S1VlORygrwYdKSg21lgo66hexBcSZDu1OtkIluV40L5/FffSufwNbgkkEqk\nUMwsR/1bhoI5Es7uFfSLx/Dsn/wgq64dJMeYYApjgjlnyE4L5lws9wgzO7eTJ6My4c27RPIHyAzu\n+Qx/8y3P4AmxnxtNpISYI/hNYbS2YW+agWCOSa4Ht7HO6WLQDCqqqkJV2XDA8WOk8Z6WU0qy3LwY\ncawCN7zpLp7/6mxR+thgGtBQIyonCeWQrZFd8wdYPPIU/tkNx1gl56CaRjyu7Q9KK5hzxtlJwZzu\nFbdP2JETNrQqAT53Nx/8gZfy1aXH8JgZ3hWbHzBzWHtq/XRr5Ihhx/Vhl3UxhzpH0e2QMJpYsdzp\nQVngUwPEyR7wGThTMOe9t1YsHjiKU8PMoaFuR/xu9yPoMF8S1fP8n7uTFQFxAbM0srDTFKeCTZKd\nEsyznikR+R0RuV9EPjV23xEReZ+I3N5+P9zeLyLy6yJyh4h8UkSu3ZGjnGEu9EQpYFiul3YO7v8C\nf/Pyl/FkrzQpojhw4aTnsdaSnERDjJ3AmctpQwbROXxZ5PehXsuF7F6Y1mz00wVz3vfpHMxBBUsl\nLug5CuUQh7hAZ+ESXvCf7mUF2sFtw7+kbbR8Lphb2QnB3M7Z+j3g+VvuexXwfjN7HPD+9meA64HH\ntV8vB37zgo/wIseR/ZMOgRPH+NiP/hCPTxXJDBNp64rzQjBhtI090xycWUBF8erw2o7KdcogVRyS\nLqgBgX4jWS+n5IIwdJmcLpizcPAoLglmDlcoZhcSxneIKwmdQ3zHL97PKjk/U4jthtzNBfMMXKhg\nnvWMmdlfAQ9uuftFwJva228CXjx2/5stcyNwSESuvKAjnGEu9OQMBxdaqwi3vOJf8cj+ClET0grl\n6LnGtmCzLJRDVJQiZcEMahxYWmA1DaA0QCmL0OblTPpIM8rpgznFkWuyC0UDLgzXxIWfHyce5xZ4\n0X/+Muu+DQjNBfOsXMhn8nzP2hVm9sX29peAK9rbR4HPjT3u3va+UxCRl4vIR0Xko+d5DFPLOQdz\nzoLbWKd+/X/m8mP3YTGNhoWNT2oRyyI5C0K53SNspMNqMqyzQL/W3C2pDe5EGF1NBvR37VjPxshH\nuSWYM0wPKmI1CubslFACIIYPDvOB77rhOCsEICBEvDkc4STBnDfg2OR8P5sXfOYsP/M5P7uZvdHM\nnmZmT7vQY5gmdqPWm3e8nQc/9C7qepCDOHryc8ySb3LUjuw088JPQhSRhoNecNUawSLBGrDc/9FD\n66NQuhOsC9/qoxyWMI623hpQH8/TR3l2ggSaZLzgNQ+wku8Bie2acJuFDOhcMMc4n8/p+Z69+4bb\n6/b7/e39nweuHnvcVe19FwU7LpRVBXffzT2/8+vQ9HPbMpWpmHR3PphAdODIKULVGTJdHBFvDUEj\ntVdUQFyJHyikbFkqrr1ET86S3hr1HgZzegePnrT13i2hzDgKV1B2i1MEE9qdhoInnDTOeM65f17P\n9wz+EfCy9vbLgHeM3f/9bVT8GcCJse36nHNl/QT3/OS/ZklSTmK2op142G6tZkwzk0CRcoOMJA4T\nK
-       [... base64-encoded PNG image data truncated ...]",
-      "text/plain": [
-       "[matplotlib figure text repr]"
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "WjMdSDKlBcPh", - "colab_type": "text" - }, - "source": [ - "## Transform\n", - "This operation transforms the given image on the basis of the transform vector given by the user. " - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "HTh1Qpps8Rg5", - "colab_type": "code", - "outputId": "3badc7c5-ae57-44a8-b619-14ed7196663d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 269 - } - }, - "source": [ - "transform = tfa.image.transform(google_img, [0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0])\n", - "_ = plt.imshow(transform)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXm8JVdZ9/t91lpVtYcz9Dxk7ETC\nJELAEFAQAQEBFdTPCwIKyFVRXwS5rwM4vCoOiHIN4sWL4icIvAyCijIFjAYVRQaTKJCRdEIgSSed\nTk/nnD1U1VrruX9UndOnQ0iapE/vs7vX9/PZ59SuXXvvtWv41bPWegZRVRKJRCJx75hJNyCRSCSm\ngSSWiUQicQwksUwkEoljIIllIpFIHANJLBOJROIYSGKZSCQSx8CaiKWIPFNErheR3SLy2rX4jkQi\nkTiRyPH2sxQRC3wZeDpwK/CfwAtV9Zrj+kWJRCJxAlkLy/JCYLeq3qSqFfBXwHPX4HsSiUTihOHW\n4DNPB25Z9fxW4HH39oYtW7borl271qApicSpw3g8BkBVERFEBFVdeX711VdPuIXrlrtUdet9bbQW\nYnlMiMjLgZcDnHXWWVx++eWTakoiMfXEGLn++usBEBFCCKgqWZatCObDH/7wCbdy3fLVY9loLbrh\ntwFnrnp+RrvuKFT1bap6gapesHXrfYp6IpE4BlSVGCPnnHMOO3fupK5rrLXEGCfdtKlnLcTyP4Hz\nROQcEcmBFwAfXoPvSSQSdyPGyBlnnEEIAWMMO3bsoKoqsiybdNOmnuMulqrqgZ8D/gG4FviAqqbB\nkkTiBGCtZTwes3fvXvI8Z8uWLagq1tpJN23qWZMxS1W9BLhkLT47kUh8Y6qqIoRAp9NhaWmJPXv2\n0O/3GQ6Hk27a1DOxCZ5EInH8yfOcpaUlrLV476mqirIscS5d6g+UtAcTiZOIZXehuq4JIdDtdokx\nUpblpJs29SSxTCROIqy19Pt9TjvtNMbjMbfccgsiQqfTmXTTpp6USCOROInw3rNz506uueYabr/9\nds455xzG43GyLI8DybJMJE4iyrJcsSSzLGNhYYGiKEi1th44ybJMJE4inHPs3r2b4XBIr9cjhICI\nJKf040ASy0TiJGMwGLBlyxa63S5lWeK9x3s/6WZNPUksE4mTiKIoMMawfft2du/ezdatWwkhkOf5\npJs29SSxTCROIgaDAd1uFxFhx44dAKkbfpxIYplInEQURQHAl7/8Zbz33HTTTSuZhxIPjCSWicRJ\nhLUW5xznnXceS0tLnHfeeZRlmSzL40ASy0TiJEJEGI/HRyX+TUJ5fEhimUicRBhjUFWuvvpqtm3b\nxo033kiMMWUdOg4ksUwkTiLG4zGqyjnnnMOGDRvodrucf/75yXXoOJDEMpE4ifDeIyI457jtttuY\nn59nNBoxOzs76aZNPUksE4mTiJmZGUIIHDx4kPn5eVSVG264IeWzPA6k2PBE4iSiLEtOP/10VJWy\nLLHWkmVZGrM8DiSxTCROIjqdDgcOHGA4HJJlGc458jynrutJN23qSWKZSJxEeO+JMfLgBz+YPM9Z\nWFhgz549ybI8DqQxy0TiJCLGSIwRVeW6665bEclkWT5wklgmEicRqkoIga997WsYY9izZ8+K72Xi\ngZG64YnEScRyZUcRWemSxxhT1qHjQLIsE4mTCGMMVVWtWJIigrWWEMKEWzb9JMsykTgJMMbwsIc9\nbNLNOKlJlmUikUgcA0ksE4lE4hhIYplIJBLHQBLLRCKROAaSWCYSicQxkMQykUgkjoEklolEInEM\nJLFMJBKJYyCJZSKRSBwDSSwTiUTiGEhimUgkEsdAEstEIpE4BpJYJhKJxDFwn2IpIm8XkTtF5KpV\n6zaJyD+KyA3t/43tehGRPxGR3SLyRRF5zFo2PpFIJE4Ux2JZvgN45t3WvRa4TFXPAy5rnwM8Cziv\nfbwceOvxaWYikUhMlvsUS1X9FHDgbqufC7yzXX4n8IOr1r9LGz4LbBCRncersYlEIjEp7u+Y5XZV\nvb1dvgPY3i6fDtyyartb23Vfh4i8XEQuF5HL9+3bdz+bkUgkEieGBzzBo03++m+6GpKqvk1VL1DV\nC7Zu3fpAm5FIJBJryv0Vy73L3ev2/53t+tuAM1dtd0a7LpFIJKaa+yuWHwZe2i6/FPjQqvUvaWfF\nHw8cXtVdTyQSianlPguWicj7gCcDW0TkVuA3gTcAHxCRnwC+Cjy/3fwS4NnAbmAIvGwN2pxIJBIn\nnPsUS1V94Td46XvuYVsFXvFAG3Vq4tv/5m7/aUeEPeAIsvyqRzAQDRDANMPGqoKIbd4T24+RCIya\n7fCgFcgYWILBfsL+mzh45zUcvvM6Om4/6BijC/TYh5FIZmqsiYgGREFVKVFEaNpEFzV9xuUMxA4j\n6VPGnKy3hXO+9Smw4VzonA5xBijA9IAOxBw0w2vEuognYMgwGGIEK4B4wKCYlR8kCsiRfaY45Hgf\njkTibqRSuOuGexkRkeXXa6y2CihCwKAGrILiUQIqOUEjyBCxQxx188njfXDwJu68/p8JS1eRm69Q\n9A5CrMgkMq/Klp6AVzRCFAfFHDFWxKioatsMQRC6ZCCRQMDKgBiXsN0DCBlzVaDIlSiR4VV/R6CD\nyhzjUUalM2RbH8f2XY+F7Y8Gsw3nZ4BZMu+JbgB0wHTwRBqBb24EF
WAoycRi1YG6Zt/I8l0hkVg7\nklhOAQoEDBbTWHOt/4GVGqUiSoFGi4sWCDi5A/xXGe+5kr03XIKJe3D5VzEyoCeQzQCRIxZa+78G\norVgHMY4Yr2EiGAMaIyoSLOpCDZaNII1EaOBIOCoQCqCo5HoKBAhMyXEO+n2LBoFGdxMfe17qa5x\nLIY5hnEXp33Lj9I593EYNgB9bOzjzUYEh1UgRpwpQbutQA4IUqN0gQLb/pREYq1IYjkFCBFHIJA1\nNpaA4LFkiGZY9oIZwHA3C7s/hr/jI3R1P10T2CEjpCMEH4gB0BzjMkKo0WgwVogSUXxjoYmC8Wj0\niCioAVVohRKabnhN1QqtIMYhddP9N8ahDCBGRCyZWIiGqJ7oA3meEzQQVbEyZkNesiHso7jlv6hu\n6rE0PI8q38WOb/s+3JnfAcyBzILMQOg2BqRGoI8VD9QoJUJGsi4Ta0kSy3XD0Re6akBVMaZdHw3W\njJrtNAOtweyBweUMP/vXDOormJm/FR2P6BcWUUeFBy2anqwtESOgSsQjTkECimkMNTWAw6hphgmJ\nBFu3Xdyvt9miiQDtGCbgBFQIqljtIwTUVGADkYDRDLTAE1BXgYJgidGhBkZ4QmcR07uSWX855XUf\nYvyFcynzR7DtcS+C+YeA2YIyB3TRAAYH4tqx03tsZiJx3EhiuU4REUSEGCOyPIERb4Hhv7Hn8xdT\nxK9Q6GFyA8aMmc3AVR3EO8QpKp4ogDYWoyGiAqgiGttvOXL4owFRTxRtur1EVAzNm9pJFWjFU0Ht\n8jtBAkIAqZs1aprXtQMqqASQCswSVsGOZsBAsEqQCBIRNRQhtuJrUAkU3evJ9HqqL15KCID0udOf\nwdnf9fOIfTzB7iKoI4cklIk1J4nlOqaxLCGMbsQuvo+br7iYbfkim6sRVgzBRkpfYk2Os0pdjejm\nfYKHYDyWshG5aDDqWD7coh5MCTSWZRRp9E9MK1QAse3uwhGrN660bWWiadXnLtNYpAEYISpYNUjI\nMWqIEhl1R8uT9+24qaICUcGqRRFULCGO6HRyRqNFnMnInOFscxOjf/o5xrKJzs7voHv+T1LrU3Ak\nvUysLUksTySxGQKsgAIPClGExnasIRYrky0giPkih7/4R5RfvYyZ/oidxQijEZMZQDAChbWINt1r\nZzPq4JsxTY2ABTWrOvitq43QvEYzBwPL3fDl5eWlrxdJ2u1W1tzDTLREi0pcEa8IGIkEiURpvJyW\nv0OONAigcY0SASK5yYh1oLBZ8zmhQmKkyD25LDG+ax8HL/00G067AHnELwGPwNNnAMyMB5hOnxrI\nKUEzVAyBI99rj7TuHg9XIrGaJJYnEvGIQoFDxS3PpWAM1DZS1oG8iOR8Ee78O6rPv5tep8TOL5HV\n5crHRIk0krsscqvE7BsJm6zeBlYEYlVUv+jd3n/MfP17VosvLLe5/eZ4X+L09b8BwGgkmLhiQkoY\n0LeB8o5LKfd8Gpl7CHMX/i7z4XyGRUVXO2i0YCNeDELEsizipv1sk0zSxDGRxPIE4gUExS47lEvj\nJykGMs3Jii8w/tIfc/DAZ5mx+8iLRXwNnU5AySbd/IkTjxI1Jc8dMUY01MzHO/GjRfZf9nw2dLfS\ne8K7qOhhrMfXXWIGuQIEkGYAAjHt+Gwicd+k/scJRVGEgGsiUiQifj/cdSP/8Z3fzh2v+R46hz5B\nL96MGwXGOMpeoKo6k274uqSqa0QCyJhDec5QHbPZfobVV3nje3+Ax775+3jbFe9hmO0np6YS8FKA\nFtiYYYBxsioTx0gSyxOIRXAYbAUyrGBpgY+/8Ee58fnP4/xihuqLO+CrHaJXDlNj8oq+n6Pn7/uz\nTwVU2ke7nFkDGnHWUoQxmMOIKGq38f76Qjpnb+Yvr/s4T3zT83j3jf/AmGplMn9cN139DmnnJo6N\nJJYnEFEHpYLczn/87s9y7XO/l28/sI8tnR4DGTKr27j2PTVdHkq/GKOVoaoXidKddNPXCavHMoU2\nlAeAohBMDQsV/P7egrobUSrKHvTPPo83fe49vOGyP2efWWRASacwRF8RQ7jHb0ok7k4SyxOJLyEc\n4kPnfwePuOKLbOyNKbuR8dgzdhX1eIFNS9u47v+9niw8kpAZYidjFA9PuuXrhNWuTO0EjRqMGsy4\ng5s7jX/tPZpLwjZiBr1xZHPpkXI/SMmld32G733j8/jM8EoWWaAygWFI/fDEsZHE8n6ixCMTye2C\nZ3XaB0/jKwQED9USB9/3p1z6pCfw1E2nIUOH7FP6pstQBzgtWPIRU8ywcfxo9r5/DzbUZFbodzrQ\n5OJpvkgieoomj5B2H+jdsjOV4tjtzuDNN4DpbmMu9qiZY1QHDmVjxCg2LNA5M+d/f+winvPn/5O9\nxtNzrvExJaJEaI/r6nmf+1UKIHHSIU1WtclywQUX6OWXXz7pZhwzjSukby8g08xsK006MTWNQ/ZI\nqK0j83u54qd+mk37DzJPIPo2yiVGRI5YNapCML6Jp9aSSoSdzxPirv8m5F20NhgzABWiyds2RKze\nP2efaUTFkIeS0mUEU2NrobB9hjZwaXgQf3DTDIdndrEtViy6QG0sPR9ALbUxZNETjBJzR267dIeG\n7cMub3zZ69lBl+gt1mUrN6WIudfb0bTZpKvPt8RRXKGqF9zXRqeeafJA0cZlcTmTohAJxHZlIA4P\nQJURMkemAz79xKdy1p5bmfVjfFUSQiDGeCTmu8ViseKJ1FSi5PT40vsPYfRCMlcSbUkgJ9omuYWo\nAalPGaFsiES6QMAqOOeolwz/nZ3G628URhu/hU1E8EuIWmwElQpDTRO02TjEO2cZhjH74gK3d5d4\n2ut/gE/V1zOolrCxuQktby/f4JE49Uhi+c3SXi3L4XWBJh1ZhQHJoDeL5kPsTf/FPzzm4Ty438VH\nkC6IBWubuJG7W/TBjhG1uKgQ5igrz+n2NP7zjVfD4hzOeTT2iSpgRgge1Lbd8VMDpzWltZho2zSW\ngs5Z/vT6DmHbYzBlSR33M+hZVAuMOirpUq2aIBOgGowYjQZ0ZgvuigtseOgZ/Mo7XsdvXPbHLJoB\nHkcTD990y1ceqT9+SpPE8v4gQFQqhBpHRxuH54E1mDpy8+t/ixtf+eOcv6lH3TWEDjAMmChNEt17\n6A5Vpomz7vgOORHrFHzgdHcOX3rPYVy1iSK/m5uLdlBz6szmmrxDbj0mBsxIGFvPtd1N3NZ/FGZY\n0ZclQq4MTBcXAanwxgCWIG1go0KOYc7lxNGYIssYhYDMw5eWruZJf/g8lhg38VGr70OqxFhP4mcn\n1glJLO8XHoyQYXC+tTiqkv6B27n0Wc8g+/RnOM326HbnEXXkAZwG7nV0Mc42Y57iMYxBPEEMzhfM\nH3wk4aptiF/EWQuxi9F8VQz3qcFwVFPWY6wIxuUsbHwQr74mZ+QdamoqGwn0KapZci3BDHGMKOII\niAQDEcGq4IKQBcHFSA5U
EQY20nnQDM9564tZ5DC1FYZ12Q6zLAeznzqWfOJoklh+kyg0XW51iDc4\nG4FDcPV/cPP3PYvH6hIzFSwuBnwM5CFiY87ICvfmpdL3JYpjqQjUxuGCQ82IKIZu2WfPRzMYPJp6\nIKipiFJhpELiqRMGaU1GZuHOrM+NGx/Gj362z8EtF5LZBVBDIMMGi9OKYIdIzLChQEWxGoliqK2h\nFtMk1WgfEmpEQciofaTeLnz3xS/h6Rc9j2FWU6FNAo679QhOsXvVKU8Sy3vgyEXQTt4sr9Rm8D+P\nhlLgsIsQFrnzjX/Cdb/wGuZmDN70GbuSQT5i4D3BVpTZGCXn3kLxMw6DZtT0CCYiMWtm3O0AEOZk\nls9ffCvduV2YVbkjmxRn8YjFs3p52pAmN6au1Bxq08ZJExqaSRMfLhu28urPLDHqnUZ++HYG1RJF\niOTBIFLj3YjKWCT2IfYpTbYSAx4xxFYkozSF0IIacidoGCMeykWlXzj2n13xtF/5Ye7icOMBpkfv\n13tIidy0mSSkJyNJLO+BCK3vXU2gZjn/QrO6BvEUCvOLB7nsZS9h4cPv4XRbE20PtMJWMEOXjs2Q\n6Mi8w8W4Kunu1zMyfYSanq9xwRCch9jF1X1gTBnHnDU4lz1/fQuZOYsY+/gI0dQYAkYVQ8RoU7FH\nTWhEZqpQMLENaXS4oGgUxsYSiYwF9ts5/uiLngM7HgUuo7AWio2ULlJLBrHABYPEAvCIDDAaGzeg\nCC5GjDbHwiw/jMFHAZOBBdtRYiZ0fM74sV2+/80/xogaY3MUgxKbRBzAkQkg3+b/9MTVN9nEScO0\nXU0nBKtN7kUhI9eMkkjlmtRgEguoDdS386kX/QiP2LvATHcjhi4LYbQ2DVKDDwF0wPiahxP+q6Kw\nIIXFhS5RCyIZkZxIBtjW4pyuCYlIRq1dihCoLIjx5IxxsYLYpWfG/NPwwfxbfj6+UoauYGRmKOJo\nxVn9eLNt2GfunJ384Ft+ggEL1FREDKK6Ukhu+YE0kUWW5VyZiZOJJJb3gJcaS2hrLRg6BAw1Q3xb\npKvmY0/6bi7Qw9jyIN2YccAHtrq1i+Euen2Mydgmm7jxwxvh8GZC6VGxRyeYwKGtY5PodHXJjRpc\nyCitYLVimMGSM3S9oaBiqX8ab7u1zyj0qbtdAsVKvkuzRv3eBY2MFpdY2DjgGRf9KHdwiEEYYZeH\nCY60vhHMlnvrRSSmkySW94CloMQSTJPNfFBZLAW9egyHb+OTT34c5+c5vqqpO5aBK+lYwZfjNWqR\nIZYBCJT1mC1hA7f9ySa6u3fi8zExlFhX4RkRZdSUetCm+Ng0YahwsgDqmnrnugEXewxdzY3ZJl5w\n5Tbob8LZg8yVB3GtK09tCoKsTWpW24loruTGUp/peOHfvJpf+cc/YFFGTXZ6BRvAxsaaPCKY07Xv\nE/dNOqL3gHgQDAYliCfPDBIqGBzmc095FhfOGGw+w2DJYkNGNJ4yq4hrNUYoHiMj0IzKBVSUDapc\n/YEBnTBHx2b4WskcWLc8htb4FzJlghkEaldjQ0Y+WkDiAMlnefMdW9gzey5jq3j6lCanGwcUwWM0\nNE76a4CLgSJEBlkjxoP6ILurr/Hy9/xvlmRA0DF+sNjscl227lP29ZOR6bqSThQCeYQRgkXJwggO\n3cEnvuvpPHTbDEM/pktGJ28SXAC4APWaTqgY0AJhjDclgwCbi9OJV+4E3UbmM4jgq9Xvma6uYBRQ\nLDYU1K4mdgtyzXnH0llcac4ls5FARm0MQztDFsC7cnUJn+OOVagMZKEtCSwZg2rIzYOvsB/P0Ch1\nrsRyuHaNSKwLkljeA2o9mEgXR4nA4QP82/c+h28/bSMDxhSdTSxpSVUErHpmS0MectZsd6ojxhlE\nRmSxsb78xjmwPW744EGqfQ4T5nHRkVlpxykbN5xpG7PMQ9MdH5k+Tkuu2PwdfHjvDoYRggiFDppR\n2RgZuwwXmpDGuEYVUkY2QwWM1BiFgh7DKlBuCLzw9f8XCwS0U2D6nWRNnuScumK5nAxDW68gBbRN\nu6Y1flghHgpqrvqR5/OdWztUo0NkY0tVVWQCEhqXlCAGldiWh10jpKY5XA4nDjMcY8rIlvxMrvmL\nA9DZgVAQbYEnNHXDNVt33XAVyGJs/R4NWWjisMd5TTQ1JYZSc3Rk2LflPH7nygMcyHaSS97MrWmO\nixGnTRIRbyxZ9Gs2G241YqIhtkMaUSLWNoM02YOFp/3RD7GfceNg5iNOPeAZx5SB/WRjfV1JJ5DK\neFybLchqE2ZYSkQUAl2qIkB9C599zHeys2vYOxyRZ33qfg40zuDLEXDLccdr1+1dTuTQJA8TbSYU\nRJv64Gd2Hsq1v70HsoeSa0EeQTRfl36WKpEgjtLkLBWeMh+RRU+v7GNClyp48npEZ9byPy7fxt7Z\nc1FzkGZAsBUtYPUeb/wm16a9K5+7ctNpjoWoIeZdzjx3By/4wCt5y+X/hwP+IOMgVCp0zPrb94kH\nxil8RA0QCASQJpytoInMqYFeOeRvLnwC586XmIVFutkcVRRG48GE2300ooZqybM1P4+r3ng9MIeL\nWZORR+p1l5VIFCIOGy1Ow0oEYcTiTaTbFfK5Lh/cN8fS7FmMXYcyy9DW6X49US7U7PcjbF7xNzf9\nI59auArjLHHs8VWK4TnZOGXFMleDRwhAhTS+ibWhItKt93PZc1/Ik7ZuZ1gPGWYwip6uQs+sL3fj\npmNuqXxkfnQuX33vEmgHI2UjlOss20aTJzKSx5rZEnIPlcnxdkR0JSM1XCcP46LD52HqRWwYEcjw\n6yBJ9d2pTU2oPaPxgEVd5LUffRORAd1OgeSnTsz+qcIpK5bNQKXBYFEctvW2md2/hxt//VfZ5Q9j\ngzBfbKXKHcbWjIua2Xp9jeJHQGzEmhqbdeHGs6F7OqbXWZ+T4WoIxhNtCQq1yamzjK7UuAHctfmR\n/PpVHWp3GrlEOlHoxiEinTWbxLm/xAJ64rB0sdaRzcPv/NObqaQk0GTDB6iq6t4/KDEVnLpiacFp\nk9piOZEvtoT/+Dzm8s+yIVPEZETbZ6aC0gVMhNKts10mkSiBKAHxho6b5/p37oZBRuFzbFxfljBA\nMAFvIqXLGNgMiSVUhnrHQ/n9zy5xa/8M5rMlxrbARtrclOtP+fOo1DauFE7r1ZF/uOU/+YfBldSU\nGGMIIZDn+aSbmjgOrLMr/8RRSgQNEJsZ8NIEuHM/X3nT65hVwduMwzLGaYkChc/IQgeJ68u6QQ3R\n5JjYJQslrjrE5lsfypUXDRB7BmGdTfJEA3mM5KXF+S5FHNOrSq7f8GB+6IrIF7c/CZU7WapLIobK\nZKBdvIFo1pdgOt+4LNWupBsCIWQUmzfyOx96Iy9418uBJjN+XU9XjH7inllfV9IJxQARTImLUCyN\n+fizn8asdxjpoD7QtRYfA2CwEeJKVcX1hMFGh5GAkYhBiCVsl/O4+X01uZkH28FYJ
URHDA6hRlWp\nRagweHVUwYCxED2iAUNENCCq0MabK66NQ48EamI7gSTSZoBXD9qs99RE8URfY4JgQpNbkpEwECFk\nisQRmfaIHfh/rl1iccMjGVeLGDND1zgcI4KtGWeesM6EEkClcVkSFYII4oRyvIRYw758wF/c/AFK\nhgQyIEIcgdaUk2544n5xn2aSiJwJvAvYTuON+DZVfbOIbALeD+wCbgaer6oHpamZ8Gbg2cAQ+HFV\nvXJtmn//sdB0n4ZAtsClP/o8HrVlE93MMRwOmzFMWHHVWX4W19eQZRObSQTNCVo0juh2TDcWyA1C\nfc0Q96BDLIlSdBxmXOK0Ty0R9SO6hSWqw/tIjIpkGd4rtfbZFzaBdnB2I53uZpSscZAXIYwdGkuo\nBxAPU8gCmb0TJ2N6cYnCGqooROcYE8hyj4YKLzAXhUoNuBynQ3Zv/XauunUrA1Fs9AQyotG2UiZk\nATqx8WVdT5IZZdll6cg8vWjTRiPwlo+8g6e/8hmclvWa32IMSspINK0cS5/SA7+gqleKyCxwhYj8\nI/DjwGWq+gYReS3wWuA1wLOA89rH44C3tv/XFeNygRlmwOUs/N07uLAY4Uq4bWEvs0UfF6bD6FbA\nW8UEg1GHKBhrKceBmc4WbnnnbZz7Sw+nf+Y1jBcX6VjLoah0+zW9BUMY9dk/mEFmzqO3/XxmHvIs\n6O6iS585tRBMk+cR2mJty+UxtF2hoG02JkZAgIXbGOy5mkN7r0QXP8VcdoBeHqirin5vhmEZUTek\nE4TSbuJ3/qOEYpZiEKDT+lOKIUoTbpiFJg9lZZmqKJmZHfM88Tefy02v+yeCZHgKAAqNbTq3xDRx\nn2KpqrcDt7fLiyJyLXA68Fzgye1m7wT+hUYsnwu8S5vyhZ8VkQ0isrP9nHXDTJE1A2hLB7nhTy5i\n12yBWzJ0Z/qNQEwJUQy1OLpUZIyJYhhoRlbkDKrD9OZ28vk3f5ELf7Ui61q82UxYVA6MHsX2x/4s\nbtND2e5mgQxMDuREcoQ2YsY01RSa+OtI40YQV8232BUBazI8RmRuK925b6X/kOcDSxD2wm2Xc+jL\nn8LfdS0b5q/HYBnnkT9dfBA397ZSmv0YN0tRNeU3VJrwRyVS2eYh6ywa6d6IwJiand96Gkss0mMD\nYJoLTkMSyynkm5qtEJFdwKOBzwHbVwngHTTddGiE9JZVb7u1XXeUWIrIy4GXA5x11lnfZLOPA7EL\nh+/gqmc+m/O2bWQw9pjMUPi1iwZZC2yEviplNSLrOMJ4yIZinqXBgHy2izpL5s9k760vZvtPPQc4\nj9l4Js7UCKHJkENGcyos18tuL2QZIOIAi7TlYZuEHjA2HsEggG3fsbzfVLImbFRMM0LgHkR99hOY\nOfsVWKCIB8Fczfs+/wn+9qqrCS6Qz29iaVBCzzXBMsFTtNnpKxuobCQLGSZOj8iEuiKaMd/9lhdz\n2c+9lw3M4wKpHz6lHLNYisiX46p1AAAgAElEQVQM8LfAq1V1YXU5V1VVkW9OYlT1bcDbAC644IIT\nL08a+cjTv4unzG3mwNiTAaU1axvfvQYYIjoakXf6jNWgheNQLAnzm/jKgZIn/PJL2PqMZzDun96Y\nO+USeW4pseQsIcsCuJq2h6302xVt2YTWqkSgo02iY5UjWyxnZreaYWkmQBoj3ZBhyKNre+7zwPl8\n/4WP50kXRg4w5CV/+DNs3GYI2mccPYGKAk+boB7RdeaFcAzM9WYZ1wOKXTM8+3Uv4FO/+deo7WBC\nILPJaX3aOKbbtIhkNEL5HlX9YLt6r4jsbF/fCdzZrr8NOHPV289o160vBnt52pm7OOyXcGWNU49l\n+pyHo4D0uwzwaNEhMsNet5Vz/uztPOHfP0H1Qy+i7G+hoxFkETrN+wQIZARsWxaB9hEI4gncfTJl\n2QItgO6RAm5tETcDKBmKXRFQUYeLBqcRoZnRLiU23f16Az06nE7OQ9jCZ375nbzrx/+Msw7PYG8b\nkWtOaQ2VNSgWE9dfUpB7x3DwwCI+QChHdB6xiT/+14tZYISznUk3LnE/uM+zr53dvhi4VlUvWvXS\nh4GXtssvBT60av1LpOHxwOGJjVcq6EpBqbhccwz8iMt+7H9wqKrI8xwnvrmoY2izi5/4i1Lbqox3\n//bl50bNSvmII+tju9ynR4cv3PgVzvjFn+exl3yQ+swzwc6RR0MRCrwYSplFZQaMJ481jgKrDqvN\nRIppI5rMylJsXGNoYtCPPMAbTzAebWfjhYjTiIsGoSZI3YiuoU3o0dSmKTAQl5rCYNpk5slj8xvO\nZo6Lf/IP+MDP/xln+o1ILCiNZWQd3qwMDhANR0ppyHr1f4v05vtEhHpcMgoD/ur6yxhTUlXN79bl\nP8rRVUQT65JjOc+eALwYeKqI/Hf7eDbwBuDpInID8LT2OcAlwE3AbuAvgP95/Jt9DLQnYU1ACSh1\nYzfGANddwRmLY9x41LrC5HgxeMmIy7kgTzDL3yttBp0jB6atRhgMah1BINQl9XiAiEKW85nbbmfb\nK1/Fs678L/je5+DtLBlt1EhbsdDR2IQCIO5uM9zNo/lnVsYiV0n1Udsh4HBYXDuW2Y5nimkbnmHJ\nsDhs+3msvg3YmWZR2vcaaLKB9uiELuewhb960Zt41YUvYeHaA8znXUK9hNYlNhdG1QhvI8GsR7/X\nBpWIj2OstfRNTm4UNy+86KKfpsxr6lA1N+8AaKAJkPRJLNcxousgQcEFF1ygl19++XH9zKbyHjg8\nAbDe4VnCHb6LL/zQj3B6blANqCrOOUII9/GJa4vVpgRsxDTWGKwk8RVtLKjgM2wGSoUpDdf5jCdf\ncgnMFVQI0EE8ZGLARioi2YpYTQHtqTikcXTvao1KzV3UvOETf8Glt36a2bmckgqtK2bpNFnTXY5T\nv66KhBkiQsBL4y4UtGQcambiLPVXlvjML78fKfoIQuEDiBBsY92vlXfU6nmGxFFcoaoX3NdGU3IV\nffNIk/QLxTU/UsBFzz8+4+nssLKS5ADA+/WQqLWxvFYLpY20SXIhGKXrAjMjGC0U5C96MU/+5CUw\nMwdlnzz2cNGQ2SN5F6fy4NaRHoYMoZKcSJctzPB7T38F//yTf8HC3iHW9un25xg7JZjY1t9ZP0K5\nTOP6BIHGo6AwjhFLdB42R11YHIHK10Rrm7EF1uOvSCwzldfTsSLtiKWGQGmBSz7J+Ztnm4QZbe1U\nEVkXd1yVJonwip1feayPZMFQ4OioIRtmXOHhWy79EJte/tNU+VzT9W3nPryBSiLaplBy6qbHqqTt\nuuYRNOJ8M50U2iR0HXpsCDN88pXvxtxSsziCUmFhOGBUHlrpJawXYns8lyO+HI4ejm7uGMuQf1/8\nEkLNrLN4AVzjUbTecnYmjjA9V9L9IUZsAGMziuoQn/qt38bXJaFw6y5sMdIW7DLNZEkhlo4tGAwr\n6qAMRp6l5z2PJ//LZejsBgKOXJtxQm88
NR6oMe1UgWLWtJDXWtA4KCmYZrJLYiOYNVBZcNJlMzN8\n5NV/zo+c8zRG+wMbNm4lt+vwNFaDN7YpS9EKoI3NZJYQeeUbX8OAEggI2ggmUxWgdMqxDs+y44MC\nwViIChEOv/VP2bnJ0cvnqUfrz0XILE/kaGP0hhCoRdD5WfY4x42n7+K0V76CoekgFE33XCJok+Yi\nV8iDxUWLbS/PwHTNF1g1dLAohmCB9saR0ea1N2BxzIxyXvW45/H3r/j/OHz9fmZlFoddSeixHixM\nbcOf8gCCby1N17pBOU575BlcfMUHGePJVJYDRxPrmKkXy298ghlqYJwpjGoOfPyDOD9kWBs6PltJ\nlHEiMXrE/UUlHuUKJGpaFx0aq8pZFnod9uzYxvnvez9PfM/fg1p6bnlMs0lq4SWyEoFjDGoan0kL\nzaTHtHXromlrti9HC5nGLYmIaIUHJOswK/OcPZjlM7/8Hnr7+0jM27Hp5d/rUGmt6wnOmDdO9YHa\nRkYO6jYialAf5p1fuoQr482gNTY2/YKVekuJdcdUi2UTiXy0v9ryogCFCh1GcOPVzPlI1p1DXYVM\nKBfrsp+iSuMqFIwnSsSowWU9Qu3IgqMcjZFeQf7EJ/G4P38345kzyI2AbVxtVrpqwvL01VHuPyvR\ndLIcojglNLGWK4tHFhrhNOQ4hY5rfmG3P8+cbOITr3gHT9z+aGwpxBjweBRLIGtvRife06HpITQC\niVoK3yQDgYiJYPMOthv4mbf8L5bMQhsxBcm+XL9M0ZX09SxfW/c0ztMIpoW65r0vfSFRC6x6suCO\ndGFPMN5GBlkz/pYHQ682TR1w4xmZElvUzGwo2NDfzOf2Dtjx6leBK+jkU32Yji93O9ghBnzwvO6p\nP8vLvufFZFWXuq4QWaIXRrhg8NKdTFvvBdFIpyjYfvpOrmUfsW6STGs9WRe2xDdm6q9C+QZPIqBV\nDdd9mSdtnEHJVkoUSFsG4ERjtY3S0UYsbTTUBrwNxHKRaD37Dh7m0+MRz/63T4LZBM6tW8frE87d\nhLL2NdZYiJEePV52+vfyKz/8auQwYErUjJsNdf3FYVdVxXg84uDoED9z0a9R502RX3H5lIV1njpM\n/1FZ7rXc7UIygEjkoz/zcjY7bSNkHN74lddPNC56Oj7S8U0LRs4wds0seN/lqC+4cW4Lz/zkx6k6\n82Cb9BVTN+54gshcRojLLkMGFwt+oP8YLvrp38eGgtIK3npcXH/WWq/bxWukDBVs7XAzB3FENJKm\nxNcp0y+WK7kUObIQIuJr6o/+LU+Zm2UgBkNJ1KIpUTChk3GYmTZXY2SYRbyJzGiHzoJy+7jHjov/\nku/+m48Rys3k6ghSY6k5GQ7T8UDv9hDAGUsnL5AwJphIkIJnuPN5w/f9JnHQw4khmOFE231PhCog\neUanm+OywDP/8GUEypS+bR0z9Vfhqnkd6rpGYwQfYLDEZa9/HaNxRcS1/m6+SfUlk4nYCW1CidpE\nvPWYqkIXxuTbTuchv/XrsH0XZVVju81hsc1Ubhryvy8UsHkz4SegCt8182284Yd/Ca1yZm0x6RZ+\nHRGDCwajEE3Fhl2bWWRMmglfv0y9WMKR0yvLMkIIiMvgjjt43EyHBZsh49BMFsuI3BcYlrvlJ5Zu\nbejVYFqJz6MyNzPHFYN98KSngOtT9DPGUlIKoG79VZNcp9Q02ZA6eMR4LBnfOfut/Nj5z2KxWn+3\nmyitD6ZCFMVJzZ64Hx/WQ+ht4p6YerEUBUuNEAlaIRaox1z2Cz9FFEe/9fLWNjOOUBOxExlE12Dx\nviLmdZO4ozfLlw4c5Cnv/xDBFW0XLCMja7Iyr7gDTf1hOi7cLfHRUS9kgJUj2Y1qjXTp8hMPfwGP\nGZyDyTr4UGJdkzJOyamtIdPJjGdmWjJyhto4jArGKq/6k9+gso17UUVEqUE9niPucT5ZnhNj6q/C\nsGwhtqF/1mRQl2w6dIhoLBIqMG3AmbZOvxOabbQW/EyXzGdkalik4IK3Xwz9uVUHoskoaeEeVCHx\nDVkxHg2KwUnjVJar5Y9+8repv7wfi2FUDnGxSe3W8Y0b1yQwGgmmsTBthKoas+DGDO8hKUiSx/XB\nVItl05kNjWuIgMbmill817s4vdNbFwkyVlMyovKBaix0zCz28Y+HhzwUin6a8X6g3INbkSAYMXSx\nfPS176Cbb6DXmUElUgSP1clN9kVpxiuNRgxKHSrMfMb/+dz7qZqiw4ha7l75ZX2d0acWUy2WEXAa\n8W30g4uwUB3mjvf8JWggxogx5qh0bBPFwsYso9SCDT//Snb82i+jRY+q8mkS5zgiNG5FjW2p+HHN\nzriRlz7mhwjjHG8MlprKLs+pn3gUwcUmuzxAnucsmCHvvuIjHGbU3DzDkWQo0oZB2um+ZKeaqd7z\npv1rabsq1jBnhbksIsasJFYwZp38zFIZDBc5uGMLPPOp1Pk8opE81zQueZyRlf9Ct9MlRMcPn/Nd\nuP2B2e48C0RCXRLNZMQythNSTZk/QaMl62TIxpx/vu0zjKmhasYrLU3aumRWTpapvkJFIUiGaERb\n922+cgv5hi5xwpnP74loLKodHv3Ot1K5TRSASk0alVobVg/5VjGwSeb54Cvfym3X3MLs7AaczSd2\nARxJnxcJAqKCC4pxyu+9/SLGVNBxjVdU845VfxOTYKrFciX5gDQ1sKk9//Ubv81dd+7DrsP60kWR\n8QUR6G4nj6btWi1XTUysFQJ08wzFsZ2C333x/8LfNURmuuT1pOSnOT+XS4mIOkw0CIrZ2mFMABNb\ncWy64MuJYxKTYf0pyjdJU3ahHcsJY5Z2f5lt/U00PkQnniaO26DSJH1tMqAbnHoOB+EZH/wA1Fm7\n5yOiGaH1vEysASs7NrZlezOeu/PxZIvCYOwxPhxVIXJ19cy1xtA60a/6TlWQvuPSG/6lKUu8/IqY\nlVtrYjJM/b4XiVBJM3/4rx/kQZs6xAAxn8AAT1vO1htw0YAEUIfNcnxuOfd3fh9mtuCzmibHTDPT\naZO9sHasykDeiJKjwxx/+8p3YG8cMnBKELBecUFXZsdthGjimmWbF40r7myiYAyoCKihUwi/8e4/\n4jBLTY1ONXiypphZuqtOjKkWy8aoNGiWEeOQf3/L23ERxtSEqjzh7TFqsNHhbYUNGREhN4Idjfj0\nyMOjHkmwpi3+sPpHpLH7NaUt9VnRJCZBDZvIeNdr3oyzfVxb+kGImBgwND6QTXjqiacsS04772xq\nmtupSjNuuVzlMzEZplosAahgKAFbLdG7/RDGe9QZzCR64W3qtywqlQUbHeNygUEd+MG/+jvozRBc\ngWDJV5s8iRNEY8H7qJhhzbaywCw2Resqq3ijK5H4zZYTCl7o5JQ5vP8/PwStBSoASlthKTEJpl8s\nHQgO7trPGXNzK92UYE58fyVKJArk3hJsSRCHscr1eQFzmwlkWEwTmgdHZXZPvas1xhhyIg4QK5i8\
nx85iG/Gri5Q+MDJKnRtY5UpkIxPp9oYQKKXivf/09yDhqHMjSeXkmGqxXI56yQnseetbsOLByERn\nwlUiiiPTmiCwqBlP+8uLIe9glZXHykWYLMu1py1LKwrE2OTAdAawvP1//zGzpkdQYakqKYNfmXRx\nE1ImYwxGI8MZpR04OFJLPsnlxJhqsVxOLuCWItf/y8cxeCoLog43qTg22to6GHIdkT/tubDrnKZ+\nTjsBtPxohDUeKVSWWDMMoKJgArk0t9kl4BF2B4/pn8XmOEvP9ujapkCTylElgU4oIXg6AmZ7hyF1\ns44mAYhNfZCJMdViCW0R+/EShURUlYBidNml6MTSFCJrvjuIIETOfMWrgKzNfNSMOSmRwKouVSoj\nsOaINhW6A9LWHIlYLI6MX/3+/xs9XDGb9ZDYHCdRJubXmGVZ46BeWIZ4HDHZk+uAqb5KRQ3RAdd/\nlrM6c6h0iFawhAkl0YhoqDFVQabKPteB7gZULTFUbeVFh+DazEJN2deUXegEIOBwWBxYgxVDFwEt\nmGUOUzqcifhQomIovMEbM5HZ50oFDQp1zeXcBH6EQdva4ulEmRRTLZYAop5rP/C3SOuD1vjHgU7o\np2VFgeZCdDk3LRwG9U19cJuS+K5LBBwZv/ezr2HfgQUym3Okdvdk7DkRQaxBQ83ff+wjK4mHVJUQ\nUzd8Uky9WFop2X/Nl7FZjlUli5HQzkpPglE5poqeYAqe+Yu/RLSCj4K2eRYT64sm9tpwfn4W9f4R\ncXlIZHlseRJtUmmSv0Tl+pu/zBjFooQQ1k9SmFOQ6d7zAowPsbmsqKtmhMnFpj63MglLzpDhkGrM\njQtj+L7vR/IMY8Cl6It1i6ohrwy/8ZxXMHYdXIiUJiIaJ3LMLEJQxUhkScYcxKPqca4tBZqYCNMt\nlgCLA3oGnMkIpuk6CTqRk1wUrBo6czNsftRjIJ85MhypaYh+PSKAEyjyHs867zsZ7R80qf0kTmx0\nUFVW4tWll7GXBUR1faUbPAWZ/j3/hS/Q7VgMBr/cg9KImcAMs1GDRuH2csBDX/OLYPpAM446yW5d\n4r6JEebosnFkiUYQseikrLh2XFIUTDfjS3ftPpLAOp1CE2PqxfKOj34MHyrqKuBN474zgeCdFYxx\nHHAe5vsQQIkQwTcFMBLrDAXqGDAiCI5nfdsTUCMTHTIRkTbLfxMV9u+f/yzWLSeznly7TnWmXCwj\nX7t6N5lGcqkwGqmMwYUCpDrhrQkmEvOAjXMwvx1vgWXXIKZ+Z5+0WGNBLYEOP/89P42pa4hjQjTo\nJJJpSCQaS1NjfswXvvallTR+YpNpOSmm+/qNESlrRAPOCEaXXYbMROqCi0Idlcf92q9RYRDaXpNZ\nznadWM8IjS9mJ+s0lqad1OWxnLnSgHgOLh2kbPvfqX8yOe7zbBCRjoh8XkS+ICJXi8jr2vXniMjn\nRGS3iLxfRPJ2fdE+392+vmvNWh+V3ApWHCKWKE328bAqoeqJxAALZQ2Pejg5R/JUBkl+5+sZA2Ai\nluZmWw5rLHZi1UFlRRgbA6DTySmJRPVIOosmxrHcOkvgqar6KOB84Jki8njgD4A3qeqDgIPAT7Tb\n/wRwsF3/pna7tSFGcg2wPCDfrFxJ1X+iEYVSM+h321i5tnRA8+oJb0/ivjlyVGJbDVIolypU7ATL\nEy+fyQ4wZLnlIEuYCeXXTDTc597XhqX2adY+FHgq8Dft+ncCP9guP7d9Tvv698ha3aJjpFM44v/f\n3rlHS3aVBf737b3Pqar76O50p0nSHZIQEwgRyMMYQBB5BDQoAoKKMMi4UAcHGVi4UFg6jKxxXIMz\n8lKRBBJ5OiRGGDIRBtCAqDDEBAJBAiRAIAmE7qT79u17b9Wps/f+5o+9q/p26KQ7j3tPVd/zW6u6\nzqNu11fn7P2dvb/9PWIkeEVNbLZAg1j2i4HObM67lnw/WzU52ay+Pw6DLkfEOFQVacDla7RAqdk3\nt9NxDMdGnVZhNsURXXkRsSJyPbAL+CTwTWBBVX3+yG3Azry9E7gVIJ/fB2x7MIUeoymzdRCw1hEx\n2Ny4m3AdCgY2n/xQIh1QD1YwtGm1pgMlAiWGHVt3UAdQbcg+OLa3j4qaRW4a3EKb9bRZjkijqGpQ\n1bOBE4HzgTMe6BeLyG+KyLUicu3u3bvv33+iSqdrUSOM3NBSLeZmIi+8MTzml36JGAqwqYyBoBDJ\nq5ntqGCSCTkC7Bcv/AWCKE2t7xwYzSYXJmPhuhtvaGcoDXOfmoOqLgCfAh4PbBGRUUzhicDteft2\n4KEA+fxm4K5D/F8Xq+p5qnre9u3b7+VLD7ytfkUUQsAEwdeKlyFlSHXEdR0WeEbfoZIN7xpYDhHO\neiyO/gHFqBaEVGxqbUU6tJykXIieUc2iVa+7X9T1eI1JQoxk80QCnvVGyTXnY5FmAOI564TH0Itl\nSozWgFeFlwIXPUKNUUsohVt230RbMrlZDhtALSLbgVpVF0SkBzydtGjzKeD5wAeBlwAfyX9yZd7/\nXD5/teqD50p7wKkieZ9LVJw40IhoTtfa0CN4GBVm5kA8aIlNObUaNTOJ+lQmWAAimJRJU3Ept2MD\nWEip6dRgR8oo1y9q4lpFwI7K4Yqyhc2ElQqZ14ZmvgZDyIaBFNCwUu1PsrTDy8Y4kmwTJwDvEZHk\nJQuXq+pVIvJV4IMi8kfAF4FL8ucvAd4nIjcDe4AXPJgCG0DR5EKRw8JGGVqaYPQcUFXq4LMmd4w9\nKxuuS6aSUg0r6fbJyH+PkdJaZ3nyP6O7ldagYyp9I83qAsn/Fhj6/Qo3ZxuTZ3Wkpaqyb9++hiRp\nGXFYZamqXwbOOcTxb5Hsl3c/PgB+8UGRDn6o92haI8xxaj45okvqZQ/iAPaIUT2QjrWKEZxFMROz\npCNaHHCIH40uiaTJ+fpP6wRAKnKaiDTq1TTybeJxNyodkcpIJMVdAt57JDRTJVTzjCRmAVWVpaWl\ncamLlmaYuoy0Y6dcVRgMiD6tWEZ07ES8nos7IgKavjtqBCtUQGdkMJA4Uu+NoAKhrnCFJURHyFlr\nSgxQN6CgLDUFSoWidJhJJR+kRvE4eusukWAYAmXe7yKE2tMrZhtLXHHQfTHCUn+ZALjWfagxplBZ\ncqAl1R4DBFXENP/MjQLYgrSKaZJCb7hdDxhSFOBjH2t6ydHZk1ybhLHyXC9MFEoRKgqcdel+GqgR\nIkVjDXJkHkiZ0yHGSIwxjXwbRkQIISS33eab+YZl6pTlQeT8ft5PRhiYZqObweTel2SKNBcb7mqL\nc4L6DlWZKhrWDrq4HN63zph0PWocUsNWB5aKLp3Vl2zd0fF7qqDofUjKckLyR6oqDUVftmSmQln+\nUIdWBREQGXsvToK7bhr1SiqKpZpXns3Ya6aJtm6tEDwMSnjDJV/jmh9sYoktGOngYgNO1+KJzlDE\ngrj3Zq544yPYToFTj6hrfOSU1pki3vs0kmuw8qZKKso3
aj0phkcmYKy7MZkKZbm6/6hqWjUVoDSE\nOMQYiw8Rk6vxaXZHkXVo6KImLVUoQIAYsAGwSqDC+lmiGyXVWP9mrsaM3WK6RUl0W9kShilxu2lC\npkiIEcFT203MkBuhmBSJtc7SAOMnWVpASSVyxdnGLM3JKd2iEkA7CENMdESNmHZ42RhToSzvzjjU\nvOgQjckDzQOLO+udcSitiAtOBEyk1pET+mj5Mh6wta6zbKtVoSipLnaUlAVcfshTfB04kAX87t4L\njYyY8gM1/Zs89SuUbqfTWNahAxz4/k1z8xgxE+RnsfGY7hF9URAixCZrk5iUm0YFjCiEIdHkCCNW\nO1lP96U+2kmOTEmVD5isKoqi0O122xbUMFM3sjzoae8sah0mKtWwT1mOnD/Wt1lZa1MGJGdhOEBm\nITmk2Fx4anLcPVZfv7S93qOnpJKaH7VlDhE0MASKYnK6hjGGsiiaNudueCajBz8AxDoCSlGsn4P1\n3S9ayp8JhZCyt3PAUT2Opk2TsALVcgiSk75ZtRewdDqd9XXYvQeEZDqZn59vWpQNz3QrSzWIsyCC\nNDRtiqsih0prYWFhZK2EKDm+V9tU6ROMMtKLihIZAr1Ot8FQgoNRVbZu3tJWdmyY6VaWxjDwNa4s\nMObAtCk1/PX5aaninqICPWMYfuyj+KpmdSpEaSqWj4P7l0p2nJ+Asryj2Oem+3/KYBVHcZiA5bo7\nv4CWijSVmVxGmdJT+CWq7Dh+Z2rSTZXnbZlyZSmCKwIrdQV1blhELB4b1+6nRUZ5M8EEzekpBOsH\n3Pavn6XjCqwlZUVCseqycmpaNUwWTdRJOhSppXgUh1PHdd+6hv12gMZmQgmseiKGIB0KrTCV4dRt\np6ZUcpNi692ATLmyNAyGQzREhsMDpW9DY+3J8K2bboYwXsagnXtPOgd3ARW47vrrMMq62sHvjU5Z\ncvpxp66yhLc0wXQrS2NYqQIFBudG03CTlWUzo7gSoFoBTQltR+Rkco3I1HLvrC5uF4jsWriTsiwP\negCvLym4wmRb+LCKHMex43DalmaY7t4rhmBLrIId1QBQgzZka1JgtixgZR9En52H3LjaZPNrqy2H\nYpTVPq2ER6Tr8HVFcymHUqEywRMF+v0B3YPW61uaYLqVpRX2R4MGiOKzDcyMw9bWH8OO2XmqD1wC\ndc04a0SmiUHB3SN4zCjeucGYZzOJTw0FpWZAjdvcwcSAKwua6CKj4hqiNRhhZX/FfC6LO4mXbqMw\n3crSGNzMPN2iHC8WjKo6NqEsRSH2+3z5kx8bCbhKWbYjgokmh6wCdOd7GNGxl8P6M+qWaa3e2gIX\nNSdkadVlU0y1slQMZ551DiLCMBwodjUeQTWAjeCW9oMecEYfZeNumWCyfXDAgCoMUVVCaKgUbkZI\nnkI7jjuBYjwIaI2WTTHVfViATRc8gwUTKV1y/3ZBEGrCOO/1+qECVT3ghN4sLC6kZm0jFTEnA153\nkVqOiAjWoAT+8ebPUYjBmw5EaSaIRyzOD9FC6Pg5HnnKmRhVtK6bKjXVwpQrSwBOfRgBiD6gYoij\nHJINPIBFYX5uDmvgK6//z9joUZPTjrWNfGJRlBADniF/ffWHc0SW5Pj1ZmzfTgzBewhw/o+eC9ZR\nuCLVm2pphOlXlsduY/9yRS8WoI6qqDCxk8rRrjMG0GGkX61Qfe1LsPA9AganJuWObKCdT2IEz6QF\noWg0WGOoCNwod1Gpx0VFTEPTcA14K5QBBv2an9zyqFUJPybs4m0gplxZRuh1qVwXoUgLLCaCWg6s\nKa6rNCn5sCqbCg9Xfignth0NLNtFnkkkxshKf4nP7voS7phuOqhKbGiBxxhDMKABlvf12USZ3JtU\nCQ2065bElCtLg5qS488+l9paTG5I65Eh/Z6oDRhn2dIp+adL3wkhVVAMtDPxScU5Q+wJb7/qfVjx\n2FEq/oaK4GmIqCpF2WFTOcMMjEupTEpyj43IlCvLNFY7+Zd/mRUrCAGJFiQ0ojBVQCyYaJBhZHuv\nB4N9xNoTNdlSWyaLFP3DIT0AAB1sSURBVJAqLAHfWvo+neDTEZXGzAUqEaMpkOHMkx9BhwK8EmWS\nKtJvPKa792qubXPGGdw16GNkiAsl2oC9MhEJ9ZBZU+CDsJkO+AprhZI2eetEojCMfT76zc/QO24e\n2x9mJekac9wXEYiKJ/K0JzwZgiDiqGltlk0y3cpSIsErFJv4wXAFL13KEJEwxEtn/cUBumIZAlYc\nZR345DOfBf1dye1SIWW3qaly4YlUw3vtpuhtBA/ZtzvigQGAr2A4QImoRJZMn7ddeQl946kKQxEM\nKhGrsRHXIWuU2bkZhiJcuO3HwDqCS4pSWmNOY0y3sgSMczDT4eFPfgpRXHL7sIbQQKMaKYEokSAQ\njecR85vghq8iUkMEn1MDj1W5GVUVbFkzclSAAAWAtVA6aiKRyKdv/xzH7txKpyyxLsXyB4mNKSYf\nA7v37sXFglkMIYc5TkYOpI3LVCtLxRAVQvCc8opXMqiHqFWClVRGdJ2JGEQNNkLt0qhF+0v806te\nD8NFMGBHlc5DKnk6NFCvu6Qbj0Dyd7Ue8MIQg8HQp+KP3vUmVnxFWFnCWUsQUDGNRYEJBjfXY7Br\nkTIXB7akEN7QPlYbY6qVJaTpri0UHrKDvdUQa4rU0BuqnxLEYPVAmYL5QnikGPjM1SA1UsOQ7Kmu\naWRjW6P9mjLyRkhZx4HC4r3HErhh8dvMnn4cngFdK/T7K3gzSvAsjXUQcV1mhw7RlHpQUpg4zQZg\nbmymWlmOa3ETCcUM1dwxrFRDuq5MlRbXGRXw+YqWISlAjZ5NmwqufctbYbCcFCbgETBQEGkmH/fG\nwoxKNLhkJp4xJT70+Y0//R1WSo86hRjGCX8NzWVyr6qKuvL8+MmPyj7D5MZumPIuO9VM/5VXSMM0\nw+Mvfh/VXBdqpR42ZwhXibgIQYTSdbgz7uNHqor/93PPAfZQKrgo+FQsNyUJXiNx2wiepGccaVQ2\nlPRwCnHIbjtATu8ByVVHxeANuGjS/TNKbOA6daylu6L8wfNeCeLSJCT/EHsUdNlpZcqvfEQNBAxW\na9j2EG6841ZWBlUaua0zomA1KQNvI0Ecw5VUD8h0DGd2DHve9W4QTwypU7alAtaBlNsMRSmISIAl\nB//lo39Ot5sebFZTUr8gKczQhfGfrTvdssv3vn4LcxQp678mRR8kta+WZphyZTnKbp2NTN1ZTnj0\nY3DOYExTxabSe8jZ2kMIzLiSfvD0+3v4/nuvAK3wDqhXVchtdebakUfTEYtEBwoLBP75ezdgfBzf\nM80zlPVi9TRf5cA3rwwqfvYnLqBHJ8WkaVrcafVks0y9srRKimoQCwJnveYP0Thc0+qO90YEUIML\nho4HmSkY+BrrLTFGTuoJfObTIFWaGw4C9RpOw1sAPIEKg8H7wLLr8+I/fQmz2yxKhwjUYjBYukGo\nTWRowUS7hpF
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "O79BrK-bC8oh", - "colab_type": "text" - }, - "source": [ - "## Random HSV in YIQ\n", - "This operation changes color scale of a given RGB image to YIQ but here delta hue and saturation values are picked randomly from the given range." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "zZBI-9XvBSuh", - "colab_type": "code", - "outputId": "acbec9e8-b217-4d8c-f6ec-d8c272fd71fa", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 269 - } - }, - "source": [ - "delta = 0.5\n", - "lower_saturation = 0.1\n", - "upper_saturation = 0.9\n", - "lower_value = 0.2\n", - "upper_value = 0.8\n", - "rand_hsvinyiq = tfa.image.random_hsv_in_yiq(google_img, delta, lower_saturation, upper_saturation, lower_value, upper_value)\n", - "_ = plt.imshow(rand_hsvinyiq)" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvVm0bFWZ5/v75lwrYjenP/Scg6Ag\nKKB0AgqiQiKKqNimZlVq1XAMX7LGqBx1c9zKcV/uuG9VLzfvrZcaw3uzbpk1MjFNEgWFMsUWFEQU\nFKRvpDk0Bzj92XtHxFpzfvdhzrVixT7dPmc3EXH2/EHsE+2KGRFr/dc359eJqpJIJBKJw2OGPYBE\nIpEYB5JYJhKJxAJIYplIJBILIIllIpFILIAklolEIrEAklgmEonEAlgWsRSRj4nIUyLyrIj89XK8\nRyKRSKwkstRxliJigaeB64FtwIPAl1X18SV9o0QikVhBlsOyvBx4VlWfV9Ue8C3g08vwPolEIrFi\nZMuwzdOBlxu3twFXHO4F09PTumnTpmUYSiKRSByebdu2vaWqJx7pecshlgtCRL4OfB1g48aN/OVf\n/uWwhpJIJFYxf/VXf/XiQp63HNPwV4Ctjdtb4n0DqOo3VPUyVb1senp6GYaRSCQSS8dyiOWDwDki\ncpaItIAvAXcsw/skEonEirHk03BVLUXk3wH/Aljgv6nqY0v9PolEIrGSLMuapareBdy1HNtOJBKJ\nYZAyeBKJRGIBJLFMJBKJBZDEMpFIJBZAEstEIpFYAEksE4lEYgEksUyMFam9XmJYJLFMjBUy7AEk\nVi1JLBPji5JMzcSKMbRCGonjB2UpLT4N/yugSqi3GhRRvYLEf73HO496DwpiDSIgxsSLxHFJHFz4\nV0TCvQLhTyKxMJJYJg7JkURQo2kn8VnVNVWNogTqFIw0NhheZQTUO9Q7XK/krSf/yLYXXmJupoN6\njzWGdp7jvcOIwVozYEmKCM45BEFEUPXhvSW8P2IwNkycnHcYI3R7PVShcCUYQ5bnvP2cc1i39WQm\n1q3BZAaTZahEkVXFq2JMNQE7+LehA48GgVcUI/aov/PE6JLEMnFIDiWUQUQraZRaVKUhnuo8YoWS\nEovFiOBLjy9Ltj/xDC8890d6M3O0TEZmDFlm8WXBtLVgbRBAr7VoqfM47zFiojhGa1MVrx4j4d0r\n5VJVXFlijMEaS1mWZBK22zY5XpUMy6tPP8/LzzyLd57COVSEcy+6gI1nnU7WbiNiwEQxlsHvYPC7\nkvrsUlmvieOLJJaJo0Yaf6trA9aVCGIFVYf0Sna/8gpPPvwIc/v2YUSYbk3RNsLExCTOeYwxeA+o\nQTWKjUaxNLYWR1O9kQwORhC89i1cMQbvXLBGAbxiscHCtIbSObLMoiheHRbBiqGdZSDCtsee4Y+P\nPkGvLOiWPbac9TbO++DlYCxqTbBm43JBfyzan+6TpPJ4JIll4qjwPoibRnkUKlEL/6qCd3Ps3bad\nRx94mFYh5JllQoT2xNooMAZRwENGEEhQbGaqJUtUFCwoHh/eKLymmmbPQxpmn6piTTVt13qs1ga1\ntdbU2zBiamXzEhdLFVoitPKc6Vab2e27uP+WO+iWJaecsYV3XH4R2eQEJsvqMYVPEN5P0lrocUkS\ny8RRISJ45+v1QIDC9cjJeO33T/PUI48xmeVkxjBNBrYvbCpxnVM88yeqArVQEq+HKf2gDbtY53cl\n8c2/omFciu8LnwqCwfjw2GTWZiqfYGb7Th6566fs2LOTS66+is1nb8W2crxIvX5arVuKpGCT44kk\nlokFo6qoDw4PV5T09s3w8I9/gc52aFmLyQwTCqb0SG7w6jDW4r1HNHqhNTqBULRSyEh4rHKTwKA4\nHp1MHrim2LhPtPEc31/r1EFx0/ifiOBN5V0Ka52bptfzyh+e4sXfPYZX2FfM8c6LL2TLBe8MSwcm\nCeXxRhLLxBGpZ5oKWpbMzczxqzt+wGTWYsLm+DwPzg8HedYKrm4TbvenzJXzB6i95lCF+srAuy0h\nlYucweXF6oqoQaUppTrwYhUJywDe15aj8x4xhrL0gJCJsqk1yRuPPsPTDz/GOy+5kNPfdTYmC+ut\nKUTp+CCJZaL2bocbitYOlTgJVsU7x47nXuahe37JCVNrWZtPIkbw6vvT53rOrKgnTEOV2lqEWrfm\nvfsyxpZHs/FQ21c5XGT7gZP26v7gy9H6lgImM2zKJtn+h6d55Q9PMXnSJi64+nKyifagYKpHRQa+\nl2qUyT00uiSxXMXoPDGopqMiUk+R1Tkeu+cBtj/3Iies28jmqXWUEvqFqI+vN3JQp8tqw0XrE4Wp\ndovemzv51T/dyTsvv4gTzj4Dk2UEa9VQRd7HkFA8YJJIjjRJLFcx1aFZW5aqiIme4tIzs3sPP/nn\nOzh5eh0bp6ZxvkSsYEptWJHBQ548wFXIE1hr6HV74JWpPOelh/7Ab396L+e/
/31sueBcxGZxeg4S\nrdNqhXNps6ESS0kSy1VNFYsTnSoCxcwMv/ruD5gwFquGU9dtBiS4QXyYJlZRORVJKBsIIebTGDDg\n4/rEies38+aTf+TVx56m9I4rPn8T+WQrhvBLjC+NoUvJiz6SpF9lFeMRnPOolqgv2fXMy/zmtruZ\nzlq4wuEF6oDIIJP1qlpiAYjGoHkTogEUWjZjwmb87Jbb2PbQ42hZAIoIMZ40HZKjSrIsVzGGsDbp\nuj1+8j9uY/PadbSt4Asly1pxah4tH/FxjVNiiI0f9vBHHtG4NolHLHineAQxlk3Ta3jtyed54reP\n8P5PXc+akzahDoy1aR4+oiSxXMWUnTkevvteOrv2sXF6XVirrPKgvYCR2ksuA+E3fvm818cJdbxo\ndJR5B1ZMkE7vwQsGYdOatfzhR7+kY5WrPnUDMtlGsEkvR5Bk8x8nePyAhAV7RqnsQVVHo8oErtvl\nnltux8z0mMpbMbIlTBWNhBJn/W31UxuT2bMwqoD2mLsZIgziYyLxtgnPaWFZ53Luue1OtOgQrPYQ\n0uS8rzZYbzUxHJJleRyggCFM+bRyGVR1HeqDy4JC2e1w9999m60nnMK6iWnU+UH90wO3DYeLVEws\nlOZ32PxevQmZUVPO8rvbfsZM2eEDn/8Ett2Osa5ax2WmU9XwSJblcUAzBOgAURPwzuFdyVP3/4b7\nbvk+p6zZxMzM/n6RnMPFZSeWiWB1mjoSIRT/yLxhXWuS+7/1fV599On48/SXQxLDI4nlcYMOiGaI\nCAoebDHw43+4lX0vvML6NVMUOASDeh8ydWzaDYaBUQkXH1zhNrc4Kel0OuRG2P74s+zZ9jp4V7+m\naZGmc9zKko6S44jgqw5BkN6HKjquKPn5t7/HlBNarRZFUWIQrDVILKTrvU9LkUPAi+IlFBSxxqLq\ncXiy3JJlGaA88fMH+P3d96IurDlXFqZzjTXoxIqQxPK4IDhyhGChhOQQh5vt8qP//i3WkNPK2rhu\nOc9Jkw62oSLUYgkh4sA0kued87Ssxe3ax7/8f9/CF72YYqpk1lblSIYy9NVIcvAcF4QwFE+wVOgU\n/PCb32bT9Bo2T66llWV0tMRLP587HWLDpariJJjGdDrMDcSH+FcxoWpTWZRsmFjDA9/+n0xsWMtF\nH/sIYvurmYmVIVmWY0J1MHkNoST9pod9t44qWO+455bbOGXDBjIJnQ57vR4Gk4pdjBTzLfzGamRV\nCUmDeGZZTmYsuVjcnhn2bHs1Tsul+aqVHf4qJInlmFAdVkaCBal1bFA46JwvkbLgZ7d8j01r1lN2\nS8TaeOCRPN4jhjb+O4CGjmr9W8cfUODxe37N0/c/jOt164l4SBpIP/ByksRybNCYDiIxSxtcrCWJ\ngnEld/4/f89ayeh1CvJWm6IshjvkxLFxqBNbFNCWzdj3ynYevOunqC/rx5JULi9JLMeA0IWwjjKv\no+6sCF48rujxs2/ezinrNlCUHYwJj7dtXqcpJsaHwXVI7YtnfVEmTIu9e/eGSlCaJg4rQXLwjAES\nCh/W2Tl1yV5VtNfj3n/8HhvWrAHvQpqigHqXkuOOAwShLEusGGyW0St6WLHscLNc96efRjJbP9eo\npBCwZSSJ5big1OuPEoWy7HR4+J/vZkM+Qa/skdkW6omlwaoKQUMddeIYqJIKqhuZzXAudMQ07Rbv\nufFDtNZOo7UnXZNQrgBpGj4miMS6sASrsex2ufubtyImxFgaYyl9CDgPYSlSd1RMjBdVWFGFqtKa\naOOtwGRONjkZCgzH54pKiNdMZ8Zl5YhiKSL/TUTeEJE/NO7bJCJ3i8gz8d+N8X4Rkf8iIs+KyCMi\ncslyDn51oVgMHsX1Cu75H9/llA2bKJ3DOQUbWs968Q2RTFF448ZAFni8YjJLz5d0xHHxDR+ue/k4\nV6JeYxJCOi0uNwuxLP878LF59/018GNVPQf4cbwN8HHgnHj5OvBfl2aYqxU9oOya9kp+9g/fZdOa\nacqyR2bzsE7pPJnJGgeb0i/QtjqRxn8jR8NZJ/WJLdxZehfWKY2h1WrhgVlfcNXNNyKtFhDK6Fmb\n1aX0Uvm85eeIYqmq9wA75939aeCb8fo3gZsb9/+dBn4FbBCRU5dqsKuBWty0amRVHQCC787yk2/e\nyuZ16+mUBXnexpi0knIoDhvLOGzq8Nd+SQyNU+mWtbRarVDLUoQ5HNd88ZOYzGKTHg6NY3XwnKyq\nr8XrrwMnx+unAy83nrct3vca8xCRrxOsTzZu3HiMwzh+qHpGVzUovShGwwK+wyGF475b7mRNK4Oi\nRNTgSp9cdGNKmAHo4AJlVa6t3heE8z52NfnUJIiJ+f/JfhwWizZLVPWYQrxU9RuqepmqXjY9Pb3Y\nYYw9MiCV4R5iq1Sryv3f/xcmW23aWRuHx2YmeMfdauuFE62wererUgMbN5sIMILdJ+d7vNVpaDvh\nNbbzMPSmLPnUVBRKH513o/dZVgvHKpbbq+l1/PeNeP8rwNbG87bE+xJHYl5WYj39VsfzDzyC2dfB\niMEbC0iYstXOnFWC9kWxNsiqSzNFULXfnrdKmh9F4rAEQb3He0+W5xix7M89V9z0UYjprQZzDCZJ\nYik5VrG8A/hqvP5V4PbG/V+JXvErgT2N6XricDQW/Ct8WbJ/+062P/sik62J6Oyp0h5DKOVq0soD\nfb4Nr7/WnWdDfxs/2mktdVp/ZKI9iRqDWmG/OD5w00cx1oYYyipeVlLBjGFyxBUvEbkF+DBwgohs\nA/534D8B3xaRrwEvAl+MT78LuBF4FpgF/u0yjPm4xKnHiqAejBEKdUhR8sD3f8y6iQk84LwjEwui\niMooa8GSIPOmz8ZInHFr/98KBURqsfSh6Wy1mhHSRUeNxpAKV+KtMNud5eo/vTlkZ6lHxKI+LMdU\nFadE7CE2mFhOjiiWqvrlQzx03UGeq8BfLHZQq5EgDIJ3PYxkZB5+/Z0fsmFqGsSg6rHGHMSyGEER\niGbQYOeYeeMUUO/r54kxeO8xNlRu75Ulc0WP9tQkm086kVPetoXpk07AtsIua6wN2zSmrrxTxRyi\nHu886j1zO/ewd/sO3nzlVd58401aec5ke4JMDK4ssSZsxzuPEVPVKglroqoY6U++ait+oV95HFjd\nFpfGMqUNDW+LXo8sb+FRLrz+aiY2rI9rlo3NmLAxQxLJYZJ8qSNCdTgZmyOiPHX/b8hN6B/t8aOp\niYdAsKiE5rwmqo9SFasFjapjJMOVBXm7zazrMNvrcOKJp3Dme85j+sQN2HYLMRbvFaQqk0udxamx\n3mN4TMBEC1IsJg8itXZqkulTT2LLJRfgnceXJb4s2f74s7z47HPQ6zCRt8laOb1eqERujGBNhkFw\naG3JS/0baOPvYWiEB1XCWVdSc55Cw6lPjFAYYXLjOgYlNTFKJLEcFdTjnSBGmNu5l90vvUqmFucV\nLw5rxuenUnGIGkSjxx7fdPyG6bQ
xlN4x40ta6zZy+UeuxeYZ3lhE+g6uIF6memXzXQasPiDqZl3h\nsb5TjAEBkxmMbUE75/RLz+f0S89HnWfHMy/y5O8fBe9oWUMrb1EUBWpMUDcN7R9CamH/cxyJwfAg\nasEkbifLMoyxzIly5WeujybkGJ0VVxnjcwQe7yihN7R6fvat29m8di1iBZsJfszCg+pCtFJVSTJ1\n7KBB2TM3w/qTNvPeD15OPj2JsVk114w2aF8oxUgjrLzZRqHh7RYAP+gFr9C+1Q79qbRYAwhiLSed\nfzYnnncWrtdjx7Mv8tiDD7Fh7QZcr8TUKt+wJqUxkiNpW/WkxnQ8vH/o4767N8eHvvxpvA/fkveK\nNcmyHEWSWI4IIhYrnvu/80NO2XQCzjs8gGpcVxsjjKHX6zLZnkAE5rpd2pOT7Ni1m/UnbOaaz9+I\nsYIaCdZhQ3CCZGp9o4o5HbQpw99KuKpyyAfqZN+qmyexB2xJrMVOTnLyheex+Zyz2PH0C/zugQc4\nYc0mvPOIlVDMxCt4wUg/rfRQaPVHwJUO9Uq73capR6xhxhV86EufRMRgs2ppIgnlqJLEckTwZcGD\n3/8R+VxBoYpY+ut94scqRqhwjnZ7Alc6ds/s5x0Xnc8Zl7wbk4UYUZBoqEV7U6gL2FaWYSVB9adu\nzKxl8JEomzLg/Go+ergyEwcJ0ySbaHPyhe/khgvfiS96zLy1m4fuuof102vwlDiJXukBB9aBCNUa\nZTgpSCYU3kNmuPCGD9KankRsVlvdqsHrndYsR5OUWDwizO7YiXRKxAh5y0ahDMUwxu3gWdtqgXre\n6u3nw//m85xx6QUYm4OGqa+qj1P0hkjFIPsqT1qoemRX0+dDy1KV12KQ+tL/ry5aV9OPZdf6dYOP\nx/a0IkjeZu2pJ3H1n3+S2bZjptfBYshiDOThxlS/GdF7bwxqwBmlvXYtYgyqsfFYFEqnLsVSjijJ\nslxx+j2+KwtJvefXP/gpGyfX4koXg88bjoohNKOq0i+1Fq3a14FRwTlPnmeUvgwee6eYzKLO88Lu\nHVzzieu56MRNSGaReY6Y+rYc7B0PfmJYyjQ/Oci1Q72XRMvQtif4wGdupJjrcv9tP2B9PkkGOOco\nvGNyYoKy16NadVUEpyV57LDpYszkjO9yzSc/EVrdNmU6RqkbsUv6WRNLR7Ish4qgTlHnWDsxjfeV\nZdV0asDwPKRVJZz+MCqPcLud0y17iBjyLEcEdu/dx6Z3buWGP/8Ca04+AaxB5UCr7VCMpkhobdWq\nGvLJNh/88k1sfMfpoSq9EayYENcZ1zGN9oOlSh/WO1tZi11+jg9/8ZNk7dYhP+lofgcJSJblihJy\nfJsWJagvuO+OHzLhq2nhaBwsff9zNaa4QqdhStzpztFqtSkLBybDAx/+ys2YvI0Yqb3Y8xmNT3d0\n1Ha9IfyCVthy4bmoV1545EnWTU7jugVkFi9VXGYsjGEEXyr7WwXXfuHTIDGYfiy/idVNEssVpDLj\nK0H03vG7n/+C6QI8cYHfSAy2HgUUq4JoiP/zjUDDVruNd8q+2RnOvvYiNr79NJAMEcF5Xzss5nt3\nx00mPGFNU8KiYrxXEJuz9bILedul5/Pqo0/z/O/+wBShlYfG0CJVT5blXHDTR8gmJsIa5dh9A4mK\nJJZDoI4fVE/n9T1M5NFLXKXZjQpCPPgbkY4uZLPMdDrMuZIP/6ubsRMt1FiqxENbFSQ+DsJgmsWX\nm4sIYgQ89ArHKe8+mxPPfhv33nI7a9sTZHkenDkKs3hsqxXSOePMQn0dVpoYI9JPtsKEA0XQsuDe\n2+4kF4Pzo1rNO0ylvXgcDmMs6oVWq41MtfnIv/4M+fQUxobUwIV47kdlmWGhVOcuLzqv+G7ITMpb\nLTyGfGqSa/7VZyijB77oFux3Pd7/2RswWVbXoxwwUBNjRRLLFSQWxgFVyl6PiULJs5wwvx09mnGE\nRgXXK5iYnmRHbx9XfuZj2FaOU8U5R5XTMoKSvygqYQsS2A+ZD2uYIUBUrOA0xGee+/7L2Nfr0jGe\nqz9/E2IsTl1dj1IEPG5YHyexCNI0fAURJFY3V3526/fZnE/hyjLkLqtvPqsOZq6smOUSoWaAd/Uu\nMm/qaZSQm24MOzv7ueZPb0LzEBJkUTSmDlY0t3PgNseLysHVDDeqW7JH8RN8yOhRYdPbT+M9a69i\nzYb1eGvIjAzUo0y53+NLEssl5/AL+GXpmXv9ObacPMH+nSVZ1qYsimCl1DGNEsN0+lKzvAfZYHqM\nxp7U3ivGgrUZ+2bmuPxzH6U1PY0Y2/iI88qJNf4e+Oj4cdDxV19V/NfE+pLhtmH9SScd+PTG92VS\nPcqxJE3Dl5zDiIOCzYSZ3ds49ayNnHHuBjrlvlqcRKqcFY0ZPICE5mXLOVJlXoZMjK00Rmi32+yf\n6/CBP/sErTVrYsfBZB0lVh9JLFcSAePmyK1SugLTLjn30q14OxuyOVSjNWkahWZlGX+k6K5oGq5B\nOQEoyh5l4bjiCzdg2228hyzP64rdicRqIonlSqLQ2/cGqCJqQmwls5x72Vl0ir3BmpMMjZk8igYj\nbhkdQFWLBoTQMMtYslg+rD0xwfa9u8gnpwCDzaqQoLTbJFYfaa9fZgbjJgtef/45BHDeY2K7AOdm\nOe99W3FZtx8ETSjxgGisRLMMY6vzFwGFzASR9F4pvWP73l38yVc+h3f0m6VxYOGJRGI1kMRymRGp\nMnKUXdueYGoyD+mA1mPJ8WWY0nq6nH3+Jt5+8QnMuj2ohHAclViibTnGBo2ii0E8HR5ngLUTfPTf\nfDmkL2bNkg9pvTKxOkliuQIIgut1mN31JipVCa7QhMzYHO9caN5lDcbMcv5lW/DZbCgRhkHIBvzL\n/Qo9cFiH0oJGRi2YIoJYw86ZvVx2w4fxEgLo+0HY/XdNJFYbSSyXHB34qzGpxc2+QSsXxCvqJRa7\nVVRDZkzIjoHSecQ6zr7gNAr24XzRqMpNo23CAeVxj2GocbtVFThRZjpzXPdnn8NMTIS1y/o9kkgm\nVjdJLJecpmtZQr/nosf2V148rNyEkl4eVcF5i3Ml5168lTWbLHOui0ho5KVaIjhCNxshlHo4NjRG\nV3sNGTrq4L3XXk3WzlN0UCIxjySWy0QlmSqKMV1yKTnsel803kzshogoZTHHaeecwBlnb6bUWYyx\neAdQVa/RRdl7qh41oViGEcNsp8u600/Cqab85URiHkksl5j5cigIrz3zCMbrYa21ZjMFkVCVyGQZ\nZTHL5LqCd152Kpp1wdjQd1tDvt1iWvNYY9CyRIwy1yv4wJ9/ChUJDdKSWiYSAySxXGIGS0ko+C7i\nu6Gu42EEaOCRWgypA8Rd2eWsCzaz8ZQMj8OKoqr0yuKYx1qXinOw5T3nYFt5w5mTSCSaJLFccqqG\nAqEAQ2//Doz6RjOug6Na/RSh0nas0BD1VaIwdjlhyyRbzl7LbLEfsUJmjz29X70ny1vsLbps
fc+5\njdXPZFUmEvNJYrkMVOuJALu3vxQr0ngOZ7HJAeuPPjp8QnUbxZOZnKIsaK2Dd11xJj2/n5z8mMfp\n8XTmelzzpU8hdfHeZFsmEgcjieVSo7EaduWpLnuICVkxh7PYlFhotpqCNxAJ1ccVxRiDMeDdLOdd\n9nb2Fm+iPrqTNFQxH1S6ECep0l/ftBI6SrbyCboTYHOLEtvUNuocJRKJPkkslxoB8EH5fIF3Rci5\nzrKjM9c01Efs367/xPhwxZezXHjFO3jbezbRLWZi6wIf2gvWotevyCgKzpU4Df10dnZmuObmj4E1\njSK39MuDJxKJmiSWy4CqggquO0ueh4Bz74+9OnYQzf4U2XkfOygq6rsgc5x/1Tvo6f4gjWKjldrc\niGBQ8iwDCdWDLr3uakyrBRL6xfg6lD7ZlYnEfJJYLgcSuiHufes11Hm8c+Q2P+a5bSik0bQWw4Z8\n7N2j4lE3x3mXbaW91ofGWMYyv8mDAq4sUVW886w5eTOlV/CDcZVJKhOJA0liueRUhTMcnf07g0fb\nGPxip7YD5cjDFF2MARGMMThX4sseW8/ZjGeW0nXqmbuIBgdTtDhFhN1792FbOdZYjIn9Yao3mr/s\nmUgkklguG1ogrlvdWCLxaW5F5l2LVdbVce6lpzO13oL1YGKbCGNw6pmYmERFuODyS+JaZnOLjRz0\nJRlvInH8kMRyqVFAS2Z3vUFumvLjD5gWL3iTKrWwCVpXVJfYp6eKyUQ8Ko6yN8dpb9/A2y8+ETvl\n8c5HB4+n1JJ9cx1Oee/ZtSB6/GBbifqDJBKJiiSWS40EAdvz5mugvnaWCAx6t4+WeUalauOuhjNH\nwrw/eMt9l7edt4nTzlkHXlAxiLG896pL0Ua1cxPLDVfjZN610WKwx7rTgzvO9CCX1f68Y93WgY8e\n7PrxzxHFUkS2ishPReRxEXlMRP59vH+TiNwtIs/EfzfG+0VE/ouIPCsij4jIJcv9IUYNEQHfw1jT\nj/ZRWXS6tcZwoqboDoYX9R+r2kV4dUyug4n1JSKe/XMzbDzz9MUNZEjMPzS9dxgRvDuw8pIc5HIw\nVtPzjnVbg6wugWyyEMuyBP4XVX03cCXwFyLybuCvgR+r6jnAj+NtgI8D58TL14H/uuSjHllixSB1\nCCXe+9qTPbS6FOqwmeH0czdx6tZJOvQgb2GOgz46xhjUO4w1B7Wa0mVpLoOM6oxj+TliYrGqvga8\nFq/vE5EngNOBTwMfjk/7JvAz4D/G+/9Og0v4VyKyQUROjdtZFZSdOYy4YOVJPD9rs87lSmIonCe3\nBa31GR/+7EdjAeHQ7Wc8kfpfVWFfp8tEO2M1H8hLTfVNKoAqubEMZ/8dHY6qCoOInAlcDDwAnNwQ\nwNeBk+P104GXGy/bFu8bEEsR+TrB8mTjxo1HOexRRUAdO159OeSD+6rxmPYLXK4wPsZQFqWnKxn5\n9Dp8WSJWxk5b5g/Xoxhruf2un9Jas2Ew8ehgn+1g3/9qet7hnjPvMdXYZkSE3sw+vnzjhxEtMSav\nXybjtgMtkgWLpYisAf4Z+EtV3SuNeaWqqhxlC0JV/QbwDYCtW7ceR6cspezso21DP5vaxTOkFEIj\nMaMnE9bkG/HkYS11bAmHaX3Lg6tiW4/F97Canne45+i8G1rlVijWWJz6gU5Q83+H1cCCxFJEcoJQ\n/r2q3hbv3l5Nr0XkVOCNeP/DIGJoAAAgAElEQVQrwNbGy7fE+1YJHkMPVPHqwTQnNCu/c4V1J6Us\nHCdu3YqRflrjOLa07X+LVcGSEEOqGm34eFIaZQNvWM9b6LaqaVBwJoamelWpwNXMQrzhAvwt8ISq\n/p+Nh+4AvhqvfxW4vXH/V6JX/Epgz2par0QVIz5Mfxtz77Czrbx1KTZk+XgniG3HMbpYMm4cmWfd\nqI/lPxUIDjURhYNc5CCX1fS8hW5LTFg2EhPCtEwWHGhyHDgFF8NCLMurgD8HHhWR38X7/jfgPwHf\nFpGvAS8CX4yP3QXcCDwLzAL/dklHPOKologJsYvqq0M7lj4bwrqlKx1GwObTSD4NeIxUzc7Gmfhl\nmio8Sxo92kFSW4xFIQLqtd5XbG3PA3E906vDiD3sdo4nFuIN/wWHtr+vO8jzFfiLRY5rTFE6e/YE\nc111JGYtIgYxwvoTT63uYSQGlhgres4xYUPFqtFPXFgeVrddveR4ZvfvDnIkMmBFKiwug+cYUVW8\n97TWrj8gRyeRWCg798z0d+dota826z2J5VKiysy+PXXAdz9AILolhrBvWRObRdjWvEeOowCExLJQ\nhQ4BvLljd9DIsV3rXjxJLJcUxeDrdTNGwJZT9YgxiGmRBDJxrLy1c1d08qwua7JJEsslJVQDUq+1\nA2VITvABVBXk2BubJRI79+yprw8WHFw9J+AklkuJKmiJRs9scNg2zsRD2K8EwSvHgfc7MUzmOr3G\njGl1ksRySfFYHCbLUB8zSkSHal4qSsdDvyl4dSWJZ2KhKJ1eWUUMI/Tb4K0mklguMSGLxA9o0fDP\nx/3ujaOwjpoYP7I8H4UdeagksVxijAkpYoOSNMzYRmkEDieJTBwbeX7sDfeOF5JYLimDRTNCo7Dh\n7mHGGMRkDaNgle/xiWNAsMYuvunemJPEcsnpx1aGLDw9VBXVFcGrJ8tb84oPr+6dPnH0iDEYaebr\nrr6TbhLLJSZMuAd3pLqQwdCodvJmkHwisTCq9smB1SeSFUksl5j5AenDZ6na8CZWNas4GL0iieUS\n48NpOJSz0mBnDjPGUVXxrt8RMdSyTD97YuGIcJAYy1EzCpafdNQsE/2da/g7VW1brvIF+sSxsjoz\nduaTxHKpEQA/TxqHt4MJAt6TSrMlEosjieWSIoCNrhQ/uMwzpJYlIoJ3PaqqR6st6yKRWCqSWC4p\nAsagquHiD90PZqVQrSoh+Xosq3cilUgcO0kslxjnoGpxMCrTXoOfN5TkIU8cHau9iAYksVxaJHq+\nq0rSZhTEUhFWb8HWxNLgfdqHklguKYLO+0qbjXCHhZFqVMNfFkgcH6zGte8klkuKUCrYaFmqulAO\nWCVm8Ky8ZCqQGRPqbGoVb7kad/XEsaJAb2627u/oG/vxalrQSWK5pAhZ3qbahaTOpR1eWbRQ/FfB\n90Knx+TiSRwDzpVhP1rFu04Sy6VEhA2bTsB5j3o/IjHgQbiLuX1Q2was6p0+cfS0Mxur/6/eWUkS\nyyVFaK9Zi/OhSVjwIA47g0cQgc6eHYMZPEMt7JEYN9p5Vi/eDMrl6pHOJJZLimDbk1ibo973K7XE\nMm3DqG0pBNHeu2sHaJFcPIljYu30NP0lpX4q72rak5JYLikCktMrFWNMI9xiuE4VEZjIDWgHH6u4\nJ7sycTRsOe0UYHUX+cuGPYDjjeBQMag6MpvhxGGqOhbLpVKNKXXoASS18ehKB0bBFfT27yFbvyaM\nhePBvpS4tHCwSINxOJz7DsCBvzpqFdGEjRs3rPo
YimRZLjVikHwCkNqyVGdQ8azYARy7SSqKsYIx\nFmNh12svIs6HR0bD+7QI4nerg3n40QfBoSR0dNDGGBVieJn60RDKKmW3YvOGafC+Ie+wugKHkmW5\n9Ihh3YbNFLtn+51wZfkOWql6k4v0TVcJQmIUHJ5MDc5AWXYRKQGDURMt0hE4Mo+CpnXj1WOsAbFx\nPVhrH1ZTcEbxgJYBO0UBj/eCmNGrECVAK7eI9h08qzFaN4nlkmOY3rCJXbtfJnh1zLLu9xr1TkVD\nmWFtvJ0arAgOBfW0Wi18sR/T2gBiGKUD8lgI4dHhM5ooMDomXn6ppaZqbgdeDWJDseaR+mVU8V4R\ne+BEdKTGucwksVxqVLDtNfQ8tKwgKvVkZTmWLAUQFXycigrgREK1IeMRsVj1zJqMXz7/HP6FbXzm\nus8zYS3jtqsP2sGClbD7fuHGaw7a+6j/utET0FCvXujnVIUq+799/EW2bd/R32eGOCeXRiZaZtKK\nXRLLJcarImpw5OC74U5ZPlnS+Fc0zPW9gDEWXxTBerGeYmqCnz/2FPnUFBSOni9o2daYLlgfmLCZ\n15lJ44PHYzAE+zh8nhLHK6+8BlnIAhsmUp9wDWXRw4iZN6bROwEtN+N5vIwwxgiIoTW5AZHKsbOM\niOLrdS4TVr+KAmssagy7BO575jmm1q0DSianJ/nlb385ZtJSUdlg0rhHG9PZ8blI7ckPVJZxp1eO\njAyJhFKDzrmwolSdmusBjudedKwksVxiFEVE2HzKaf087GXc+6tpOIR1SyNh3dJlOS8XHX7zwsv0\nsgz1ilOl0+2wY+d2Sl8s36CWk8Z36dXhtekQGbcL9b8KdHsFrckWo5BdVXnCnQtT8DAjX13iOJ8k\nlsuEySbolRZfWT4qx5zBI4S+4xrzck28LVJNvxWxgmqoB+Mm2jy+cwdPbN+BTE4hxuDVk5scaw1T\na6Z48oUnqMNXtArB6XeBHEUkrmdU36IRE6eH44fUfz2C4L3jj6/tCI4U+lbdSqHxpF79+mIF5z25\nEU4++cQ42jhqWX2ecEhiueSEA1oweQtvJzFiqScwx7h/aSwqbPCoCF4EL2EC6kWxeYuyKMjzjI5p\n8ZPnX2RnryTLBaP95mlVYWInnkeeejRaZdp3Dum4HQIy3getVBNyxYjhoUeewNiMYVhwTcNR8ahT\nrM0oyoIrLjpnxcczihxRLEVkQkR+LSK/F5HHROT/iPefJSIPiMizIvKPItKK97fj7Wfj42cu70cY\nLYKFFuy/zaduAR/P1ou2EqSyQcLB5aNzQAzqFdtq8cfde/jFyy+RtVuUWqLa/3l9NfNTxaunNd2i\nVIfzLizm07cqRtm6PJ7ox0gEWhMTMaRrGAzGaoiC856iLMmNHdKYRouF/DJd4FpVfS9wEfAxEbkS\n+M/A36jq2cAu4Gvx+V8DdsX7/yY+b9UQGtJ7VCzt9RsAiTp57AIU1iVBGjIWYioNRj0+z3n09e08\ntX8/Jm9FK5HQPG3ettQrFsFmwr2/uQcRok9W+wHuiRWhn8MjFN5j84yhnaq0eUVxpUOAouiRSxJL\nWIBYamB/vJnHiwLXArfG+78J3ByvfzreJj5+nQwzWGwISBXwLRZPKL4rgyfuoyLsvmE67QVKD5Ll\neHX0bMZDL7/Ea85hsxZGfVx7EkytmoPrfKLgCsf23dvpuu7A+7BK16NWnsGdYfvOvYSq+nrAYyuC\nDN4w1pBZwwkb1iNppgEscM1SRKyI/A54A7gbeA7YraplfMo24PR4/XTgZYD4+B5g81IOerSpQkMA\nLKX2Y9ZkEV7OcCCB8YI1EtaVWm3ue3EbezC0rMWrw0nla43On/5C1ACCILlw9z3/QuF7tXWZWBm8\nr9oTCz3vuP+3j/SdO0MYTz9xIu49Auodl1xwzkCI02pmQWKpqk5VLwK2AJcD5y32jUXk6yLyGxH5\nzczMzGI3N0LE3V0VJGPthpNATPQ4L8IbHl0BBqWjykNvvMG9r72Gyw1o2NWNgvgQ5KzRCeQFREMy\noIrixUfnEOTW0jE97n/kAZx3GJWB/iqJ5cMYi4jBO8/Lr+3AZS1EbH9WsuJoPwwUQY0ys28PJ29c\nN5Q6rKPIUa0mq+pu4KfA+4ENIlJlAG0BXonXXwG2AsTH1wM7DrKtb6jqZap62fT09DEOfzTxcSql\nCmtOPh3EYk1GXZL/gJC7IIbVbUGx1jQsPYOKYKywz1p+/cqr7Cg1Tu8NiMSTv8SDLSCAadSFC+ue\n/R1fVDCZ8PKbL+G0xKuLDimNVk9icTRPPL4OzFIU711IdxT45UOPItZSnaqGY8hFazLuIRaDdyVW\nDIxES+fhsxBv+IkisiFenwSuB54giObn49O+Ctwer98RbxMf/4mOfz2wo0RBYmkHM8nMXBfvfVUw\nvXEGrwgqaTRMnbXKmhCN+6lHRHlD4YEXXkRb7ZgpJBzowukPobrUb6fSvxCq9qhXpqYmuf0nt4eg\ndiOo1yXw3icOpB9daWKu9Ru79pG3J0OsbGXBDe2r17jGHS6XvvfCoS0LjCILsSxPBX4qIo8ADwJ3\nq+r3gf8I/AcReZawJvm38fl/C2yO9/8H4K+XftijzYA/y7TYfOoZGBPiLetg8sZOWaFSVc0Jzhkr\nhkI9CHTbEzz66na8zYONUtUbXESoiRcFrxRFD20JT7z0ZPUJjoN6l6NFZbE17HpK9fzwnl+RZVmM\ntqxmAcP77qsRFL0e55116tDHM0ocsZCGqj4CXHyQ+58nrF/Ov78DfGFJRjemVAUSEMEprDlhC6+/\n8Qp5NrjTNR3kGi3I6l71oZKQyXNe7XV5atsfKWxG3s4Q5/FGcCKLyowzxmDF0O11UXU8/OTveOeW\nc2jZNsmeWAqqVEZl8PtUSq/88bU3mVq/Eec1RC7Ex4YVjRBWhBRvwBU98nr9NIklpAyeZaAqNuCo\nzspqp5nccCJOQwqZ94qLVdSrEEwj1b+CwaBG2W2En73wIk++uRPyFsZanBqcsYhC7hY5UlWcd7RM\nTp5ltNqWO++9i1JLil6vfl6/l1DiaKlSSMPv7Kmkc+9ch4eeeD7IqAnl8rwJaQfD0KYg5zasq3vl\nyksv7ItD0kogieXyEUtameh8WX/amXhsmI6LYOdlRZSlw5qMsgA1hjlreOCVbcia6RhjqYTzvEdi\nxV+NRTMWNUyVaNUKZVHQdXM88dLjkFWin46UxSKYGB0RdKdU5bv/86d4MowRyrJA60BcGcp6cbMU\ncW92hjNP3YzGor9p/TqQxHLJqXZ6U98yImCmo1hmGDExL7tPZjN63mOnWryyfz+/fP4lsnwS1+th\n8zxOuUPzM0FRUVTMoiqDVyP18VjI8oxWO+eRJx+h6wqqCkomFX49RpoJAX0x8qpsPPlUMmsoS0du\nw4lVRiBEx6snM0KGiVEapHCySDoKlgWp/1UXG2uJ4aQzzqIoiugAkoFnK2BaGU+8tZ3Hdu1E2pNY\nJYRu+E
aSoyi+burj8Iv4CZtWpUosGuw9U9OTfPfH36Xnezhd5Fw/ERDwPuRb3/XjX9Htlah6Wnm7\nPhkNUyq9L0M5tizj8ksuCHuVCGLGvFjJEpLEcpmorAmxtrpGNn0ypQqudHjAZibEZIqha3N+8/I2\ntnUKyNtYA2qCMKpKDDsKYT9S5SWqwSximqzE2EsN03nVELtZupL2ZM6td/8TxUDdy/BePk3NF0QI\nZAjl1wAwwuPPbWN/2XfoOFfiddD6HAaqsUyLgzNOqRLuzAGuqdVMEssVRCXntHe9D5sbMiM4pziv\nvKEl925/hT3GkktGpiUq4QALU+6Vse4k/leFTmdtyx0/v70uFOw1TMlSjPLCUFHUKyIWp8r3f/4A\nT738Oq3WxLCHdgDW5EgmFLN7sbGfcFWLKp0aA0ksVxCnHsw0Ra/EqeClxZN79vL77dtxhaOFAVeG\nYr1iEM1ATZ1pvpJUB0ipBXf+4k66vhs89SOwrjYuqDrEhGT9h55+gf2dEud0JE019Z6i8Gw9dRMH\nVvhPcglJLFeULKYmzricnjHc+8KL7HAOzSx5ZvF41BgUg3gTC/f6Fa/6UlmYxljyPKPQDt/72R2U\ndd0UoSjL+vnJ9hikiiCwJpRce377W7yw7Q2yvI2xdmS0shpnaExmMShXXHx+HWGhVOuVozLi4ZLE\ncoURgTMuvJxfPPccrJnAq2LLEBzkTOWZjpNh42L62XB21rCmKah4bFu49e5/oqtdFCXLsjRJOyJC\noZ77H3wsRC6gMfRr2OMKVNWwIOxxrrM3ntCjUCaNHCCJ5QriUQqnqFjETOGcByvkNsMg/bAgPN7E\n8CAE1SEUX62i5QHUMNuZw7YNt9x5Cx03h/MuVnxI3tImoRRfSD7o+pLv/Oh+Jqamq1SFxt/RoAoN\n897xJ9dcSaxPNfCLpl83kMRyBTFAbgVDm8999HN09s1gJcfFEmsSPd2iglGD8TbE3i13O92DoiH1\nTUAxZDZD8azfuIbv3Rum5E5LvC8pyjHtFLloDpQ9kVA76OmXXuM7P3kQm4X2HeH7DEssfii/Z6Bq\nTAbVNFxxzmG0y0kb1ja6OIbeTNWrEkksV5iQh2ONkJmcKy++irIoQqk1ojCF1nnBvhSWJEvnmIiF\ng6t2FkYMgqF0oWfPt3/4LV7dvQ1EyW0WMj3CC/tTc20cmEP4CEtCPX6NRdb6eO9Q9XhCCqt6pVTP\nz3/zBx5++mWMyXAq8buTWH6vqh2w8mhdVaiqPKVYI6CeGz9UWZX9GOH+OJNtCUksh4YCbzvlTGb3\nzFG6MuSFaxX3SEMghZFZ5GowMdHiF7+9jx/edzeFFvSTfPrTcpVY2Yhm/sqYIYNX+59DwVgQg5ax\nXpCB2374c7bvmsW78mBbGzrVGrRKaHVSOqXs7KOdZym19QgksVxB+itWwULJTM5nb/wsmckPqO0y\nKC6jd2Y3Vmi1Mzp0+Ie7bmHOdVAGp5d911S/zJdnvIpy1NZknQzQbDMWTgtF6SnF84N7H4RsLe3J\nNjYfvSZfzWVoUMrS4Zzjsx//CNaY2uGTRPPgJLFcQerqMxAcOgjT2TS2jDneUuV8R2tMR9cec97H\nbJ8eazZO852ffoeu68UA9jBiMxB6IvV948RA4IxUNSdj8L5TSt+lsB3+7vbvs68AMUqv26lq7o0W\ndXprkHubtXCdGfLGryIirLL+ggvmiPUsE0tHv7JLOORK58is5dPX3cx37/0uNjeUvqQoCnKb1Q6f\nUdQXE2MIjQKlw2TwgwfuYnbfLJdfdCVbTzyDlsmig6qKFD3Q0zryOAUbLEzx1EVQnHiefvUZHnjo\nl6zbtJZNJ7Uoe2+i/iQEizGDFZtGQoBUgnnkw07lfcmXP3ltCBdKHJH0La0gMu9aZkPhCkFoSxuc\n4EpPu91Ph+tPYEcLRes2GV6i51wdrTUtHn7qIf7xzlvY29sbp+bhU3h1I/lZDodYE1M8Y1FmHD3t\n8vOHfs5jLzzCuk1rcU7p9bpg9mHkTZQCjNZW2kgIJYRFcR9qEYiB3sw+rASnXeLIJLEcEk5DUzBj\nBDGWP7nqemb27Kedteh1+r28R9UME9VYASms4bkqvrBbUhZdpjdOcefP7+S3zzxML+aWGw2FZceN\nyivc04JfP/kg//yjW9mxdzuxKwfOO6wNBXzV7sdkO4G5oY75YCge8SHYvCgLPnL1ZXVc7/j9KitP\nmoYPgarGpdTreDBpJ7n2g9dx30O/ILSjUIyGkB3v/UDXxtGgOaUW8iynLIJoiBicd+TtjBdef57H\nn3mM8895Nxe843xyk1O1Wati/qoKPNV9Um91EG1M5uc90B+THHDnPCEQDsg6qrpkxmc3g+ydljh1\nPPiHX/PHV1+kNdUin8z7zhIhZDOpj7c9MItICeJQXYt6QXD1d1Z3+Zw3tvkzj2Oh/9kaLUckfEZj\nDA6Hlkpv315O3bweEDKbj+o5eaRIYjkkDrZznrT2JFp2gsw6CtcDEbz3mJETyshBnBi155h4jIpn\nYm2bl958mSeeeYJN6zZx7Qeup2Uy1Ck2y8JnNDHK74AOh411v8a3NpBq2Xhu/znNa83tHNjjRqX5\nHEHxOPU4dTzy9O954qnHWbNxDdNrJyldsJLrcc43yWL8okgX2AX0ELM5lNkDVE2d+aR1WFjfv46G\n9d1Fz9wbdTCqOHPnPFkrpzc7y5c+/SdkkkqwHQ1JLEcExWOw3HDlDdz36H3s2v8WJR4xFvVuBC3L\nQ9MPUQkB6qV3KEq+tkXXdLn93ttADb2ZLldcfAVnnvI2cs1DzGKtlX37sgpzr3zR1XscPM3yUBPK\n+ZEFjWsaAoScerbvep2f3PcT2msmwIRPMHXCNM6VuE6JzWK/HDxyuFUsUaCLUAIlsBbVSWLh0NoI\n7n+iGFgmwmIah9dbk74AV1vLspxud46PXXMJkzaP3UHnvzJxKJJYjgiCgBHarQk+eMk1/MOdf8/E\n2jaM4bk/JP9o1ISYwRKn22VZht6Xokyun+Dh537Pr/7wa9a11nDRuy9iy0lb41Zi7SORGIIEBzHj\n6ufOf2xQFOuiY0F4VeuY0K7r8rvHHuKl116mNJ58Imdi/USYTfuYteMc6pUsz2LReu1bo4fSNRWU\nsCRhZB/el/HJk4RuSo7+KSFkdoVTgm+kHB4LTfGVwfvVszbPWD81SbB+KyFNK5YLIYnliNCcHGaS\n8fmPf5E77v0OWnryvB3WxMaMqgeXjVaxaviUGqsHew3N1yYnWxT0eODJB7j/D/fTmelw4sYTuPzi\nK1g7uZZMciqDy0gscSaVzMyfZFdXNFp/0YutDqeeN/e8wR9feoHX3nydbtkln8jBQjZlydWGtE1D\ndH0KVk3YqAmNiqs57UK6MAbRK/FqEdNFdAfeb0BkGtWMqttj48ks5uRYnZyqOF2N031jDEYM+3e/\nxZ996jrwJRrXKQ++Opw4GEksR47g4pgwbbSj5NMtdMw8yEahnvb
KwSy+KuecEIOpGoQTaGVCtn6S\nWb+fnzz4I8DgS48rHbnkbFi/kXVr1jI1NcXUxBStvEU7b6HeU5Ylc90Oc905du7Zxe69e+h0OpS+\nQHLI8wwxBmOEbMqgPUOWG3zhQviTic6mKhmgsSw5sHKqcc1SDlOgrv7cseSZCiIFxr6F+i7KiWHb\nKo2e4c2FhqNHBkzd6DwUQb0Do1x/zeVQOrJWK3zfdThUqhu1EJJYjghNT2glIp+9/nPc+qNbaU1k\njOPZv2qGFm7QdwZX1h5hvVBEaovRVc4eBcUhArZtsa1gye3r7WZmz150d+hfI1WaHn1LttYLC3YS\nMmlFwQr50MYYyqIMFYKch8xg4+tUfdA3lTqPuvEp+h/kSPn6A46qcGZQPKhFZB9CBkwC7ej88fG5\nph7nMXzj8S3D92GtxZUFBti3dw8nb1xHVn1H1Xc2fqs8Q2N8vAarDEHIbYsvXP9F9r05gxOHYCi9\nrw/A4G0e8kAPgjJvXNGKrCtwi1ZFjYKTRCUKFYgx9etNDENCBYPFxBYN3vsYoxrT9HyM2HHhet+5\nrBhrkFgxKQiywbvQF0dMcNaIp18gyYTu7CrRKlYZvBC2q4ezKud/G7WAm/haQWQXYt5AzB4wZdi8\n+jg19/V46nVWfLgc5k2V0EzOa/gyy7IgM5bZmb186ZMfipk6wdKt14ElWZULJYnlCCMIVjK+/Jk/\no9hf4HxJO2/Vj6low9c5YjQjYgavHvDYIV93sLCcg10OOYYjPT7wz5Hff/4YFkLzefVrqw07jOzG\nyA5ECpCske3TVP1qvdocNqaoWkc1cTk1MxmlK7nhmitpyegV9hg3kliOOEaEtmnx+Ru+gLUW9WXI\nWqsdDYmxRHxc81RE9iPyJiIhxIoqRTKsU8QQpUrsDp+aaEycxgPdssf6CcPJG9fWTrbEsZO+wVEl\nTqWq7JK2nUB7inM+1oiME7QRNSwTC0RCjKeYHmLeRKRD/7D0gEUxYU1zAeXtKg+4FQPdWT50xcXR\n4kw7ymJJYjmqiIS1tqrKtsKnr72Z3lwZ9vtYdzAdA+NKZSlKdP44oIOYHYjsjxamQSSsY2pcxFQ9\n9CGrQOl9aAGiji/ddC0TrTy+TZqDLJYklmOCEUNu2nz541/mpHUnMdmaJDPNNa7E+FFZi0JYjyxA\nuoh5A2PeQGS2ETQuMZj90AiKNVCWXT7xoYvJYkB/tYXE4khiOSYoinqPwXD1RddgfYuiU9ahLYlx\noxLJGMtTecshhA2YfRizG+ghQiz1BiKHX7PMBN5/8XlMZq36vn7weWIxJLEcYQadsiFtEA0ZPh/9\nwEeZbE+Rm+zAmL8R7NmTmE8IS1K1wUMu/V86/M1RuhjzJkg32pa+jlCtQor6t0BUOfO0U9iyeQNV\nNj2NLSYWRxLLEUbmXVRCHCKEY+v6qz7K/p1zWGtCMQilX5kwRRuPNnHtUSqnTSMkqaGbYDpg3kJk\nD32HT3w4LGuGEn6qnHHKZi4770x86ea170j7wVKQxHJMscbSNi0+94kvsPutfVgsNjeIhv7UIbDa\np+NkLNG4fulRzRDpYcwOxOwC6WFUsLHkW1F41Aub1k3wvvPfgXMu1tdM1uRSk8RybJiXY62ClsqU\nneTPP/kV3rHlHPCx7rWEQrRGj1zsITGKCGjMLpJQnUiRkCYp20Bm6rXqzBrOPHEd177vPUCIswSS\n428ZSGI5RlRpvRAiQWweUvtzk/OuM97F+979PorCh55U3oW0t8SYEjzkisS6kx5HLA2Xv4XXnXgc\np65bw/svehdoGWNyk0guFwsWSxGxIvKwiHw/3j5LRB4QkWdF5B9FpBXvb8fbz8bHz1yeoa9eDsjG\nU6XdmuCUjadx0zWfwGARBOfTNHw8qat6xDVNxWsoNoIIruxh832849RJrr7kXbEtsRndivrHCUfz\n7f574InG7f8M/I2qnk2on/+1eP/XgF3x/r+Jz0ssmn7lwfn6573Ho+QmZ41dw9tOOhNjM1qtVnKM\njy1aO+lCVaZQkxNVim6XS8+9iEvf/V7ECMaCNYYxLHk6VixILEVkC/AJ4P+NtwW4Frg1PuWbwM3x\n+qfjbeLj10maGywaP6/Cw4BoGgmOHUDE8p53Xsz733sle/fsPqCeZGIMENOI+gl54d4rxgjelXz8\nmk9y1slnxxbgLnafDPU4E8vHQr/e/wv4X+knp24GdqtqGW9vA06P108HXgaIj++Jzx9ARL4uIr8R\nkd/MzMwc4/BXD+aAQJ/PwUMAAA+tSURBVKLGYzEtEsBaQ24yTll3Gn/2sX/NzJ7ZsNYZWylksUGY\nSIzti5WL6jyPoyyqkzg26u+4UW4PtF/l3IeamqgD8VibUcyVnH3aOWye2kxVJ95II20ysawcUSxF\n5CbgDVX97VK+sap+Q1UvU9XLpqenl3LTqx5VxYqlbSf4yk1f5carPxXaJSD0ii7WCt73vawqghdC\nnUUFq4L16eBbTqzGivLKwPcPgnMF1oSTWaka2mp0DZ+57rNccu4lqdbUkFiIZXkV8CkReQH4FmH6\n/X8DG0SkqrS+BXglXn8F2AoQH18P7FjCMSeOQLXqEbyohkk7yceuuoHOvg7WZJTekbWyyrCsXgWx\nOqY2LJzE8nBgG1+pf49WllH4AmMteMO7znw3N///7Z1djBxVdsd/51ZVz4c92B6PPeCxCdjmy8oG\nzLIYsmhhSXbZtfnYD6LAmsBDokhJHhLxkIAiRcpj8hAlkaJsopAoD8mG7G4SCFJEvAubKFLkXVjA\nfAXwbpAwwTMeMDBje7q7qk4e7q3qmvaMdxigu2bm/Cyrq25Vd/+7pvrf59577r2fvZ3BaAhsXsq+\n8RPNUlUfVNXtqnoRcBfwhKoeBJ4E7gyn3Qc8ErYfDfuE40+oZcj2nDRNy5URATYObOLu/QdJ8gYR\nMVma0an+lTEmKpCLhmngjI+Ls69xp0kkzTKSqMHp002+8tkvc8n5u4iixNurdeL0jQ/TJPw7wP0i\nchTfJvlQKH8I2BzK7wce+HASjeUQx0XQ77+AORC5mNtuvJ1r9+zj9HtnyFFaWYsojkiztBJpWjXv\n40fKpTYkzG7u25GBPOL9k6f40k1fYsA1cOFvKVBZ3MzoNR9owTJV/R7wvbD9Y+DaBc6ZA37hI9Bm\nfBSozpvL0Iljx5Yd3HXga3z70DdxcUSWZgwPDdFutnCIHx1k38mPFdEi6vfRZRRFZFlOJDHrhtbx\n5Zu+QiRCpnmlE0fD75j9cfqBre64ysnFr3HtyhR2Px3YgBvgrlu+Ritr8/Bj38CNOr8cbHGOfSE/\nXgR8dqz/MWudafLJy69h97bdPruhiPClWIVHuybHMHqNZWatcqTsO+2kHTnnIxWHYzBqcPC2g3z6\nyhuYefs0WV6kruRh0UW/LGs1YUml08Yp899szXls5/rSScUKj0Xvts8OysnyFBf5nMks89tCxMz0\nLDd/6mYunbgUp2Fm/O
KiU1zWNXZha4iZ5SrnXF8xDW2ZzkVsHRnn3jt+CU4pmgpRHJNnKRqGTJYd\nEiE9My/zMXXeHLZrauIOgVzy8roUV7ucXc2v/0EufpmxLMuJIkeWtVEV5t6f47KJy7j3jnvZOjIO\n+Ghzkbcy+oxVw9cw4kNE8tCsKZnw1S9+lbmsxaPfeZTG+phWs+VnMIqLirwi5TS0xevMn+Rj7VCJ\nrEPLRbE4cZmIJf46Ry7CxUKz2SQZSNix+SL2Xr6XBL82uoofyx87Sw2qK2aWaxzNMqIo8m1icYxD\nSFyDu7/wi8y2Z3jk8X8l2pigmvskanw06dQqhtLdC1ZOCeV3NBilokjkO87as20+f+PPs2X9ON2R\naIRDc0XcWr+y9cTMcg2TkyOR87PbqBBWrfAdQhJzXmMTB/ffQyptnnvtCEdeeZ6RjetDFJmTZilx\nnHQmmg3j99ZKdksWTLFYsliLpYtxNBoJOMeZuTnas21u/dwBNg5u8r3gIXosBgC4MA2bOHzvN87a\nKGuImeUappzfRDrZRUJnaQpVHxFFWcynLvskey+5kjffeZP/+O//pLEuYWBgiFbaIkkS0jTFhZ53\nVNdEldxpqIj7BEjyLEOBRpww12zROtNi35X7uHT75cFTBVzn+voshdD9JiECFTGjrClmlmsYmVcN\n1HK/XD41OKhEfpmK2CVcuPlC7rn1Hl77v6M8c+SHfjlBR7mY2loarFXt1/LR5ACz783iGhG33XgH\nQ9GAn1tUXMhb1crzOoZYLDlWNU+jfphZGpXUoipKWcssDwsiEQJcOnEJu7btpJ2nPP3iDzj21jGi\nweisV1p4jPkChrDgadXq/eJPXfD5cq6Di7zmuTScVSwhl8Qv79Futdm0eZT9txwglrhrIt6wjLFz\nC34AWeQvYNQLM0tjESpV9AWPOmJxxFHCz37iBto/3ebY9BscfvowOGVgcAAlJ3IRaZaV7XlAuUJl\n9dXOHpii5YMU05YtQe65DleD3mpDQXeEDT5SVpRWs0VjoEGW574jLM2IIz/NXfNUi/Et57PvqusY\ncAMkLllktvJOO+Uy5Rs1wMzSWBZaps34ymOSJ+zaupuLb9mJSk6ufg2gQ/91iFMzJ4kGY+Ik8ilI\nWU6e5zgnvve3SLFByohv/hQfxZvqwkGgnFXSpRVyXGiX1YprFvao5cTkqupzTEO7azzQQHE0Z+cY\njAf5xBVXsnP7TiKJcOqIIr/ut+bVUU/zusWNVYKZpbEspOIHIiBFHqYT0IjY+XWADtx4gFwzMs2Z\nfPstXnz1JaZOTRMnMRK7cvIIAXLNyyiyfPGqCUroJOmuGhez6M7rhq9Ejlq1XD8iqXNY0Fx8uk7o\npGm1M9JWSuQito9tY8+le9g4vJFIotAp5vxicA7aee6XdBCIzBtXNWaWxrLQeSNWOp1DPu2oc57g\ncOLLJrZcyMTYDh91okydnOTVH7/GiZPTNLMWLhaixM3rVRcpok/f4aTkoWoe0m1E0GLxmcWCS1Vf\nrdYcca4caJSFYYd52xvqecMj7J7YzRU7ryiNUXDhsfN5oTP7T1m9nlf7NtdcjZhZGstE5m2VkZsC\nIl2+VTlXfFtnrsq20e1sG53wwy41J9ecdt7m+NRxjk8d543jx8hJO2PUI78OjThvpiJCHozSmyGo\n5qgqeRYeg1FKHqFpzvrhIUY3jLJ1bCsT529jMBkOugpzdkTik8NR/16LjeIUnR/1VpPMzS5XH2aW\nxrKQs/Y6M393EmHocg6/4RffKkIxf0IkkOUZjXiAXRO72Tmxi1zzEDV2ItjFFBQU53RScypRb4hY\nFcWJbyboNGP69ylyTwuTzMPzuz+L0t2hPv/Hw1h9mFkay+JsU1o4qiqmfZNq+bwqtG9qVIVIorI9\nshhFdLbzLJyKdLaerqcs6GD+gIYlZxHxo5rKf5WFxOiY4/yZnBZ/XWN1YWZpLIvFRpl0ly6UZF3t\nhykjua7o81zv/EH0nPslO+2snUwpd9bxYmvp9mdGuRqxKdoMwzCWgJmlYRjGEjCzNAzDWAJmloZh\nGEvAzNIwDGMJmFkahmEsgVqkDqVpyvHjx/2kBIE8DyMznJ+sIE1TnHO02+3yPOccee5HasRxTJqm\nfmaYPCcLyyWIzE/jKOZb3Lp1a48+nWEYq4FaRJaqWhpiFEWsX7++NLo8z2k2mwwPDxPH8bznQBiV\nIUKWZeWxwiDjOC7NtHhOcb5hGMYHoRaRJfhIcmxsjDzPmZmZIUkSsiwjyzLGx8eZnJwkSRJEhKGh\nIUZGRgA4ceJEGSVOTk6yYcMGnHOcOXMG5xzNZpMtW7YAcPLkSTNKwzCWRW3MMooikiQBvKllWRbm\nPPTBr3OO0dFRTp06xdDQENPT0wwNDTE8PMzU1BRZljEyMkKSJExPTzM87CdIiOOYOI5RVRqNBmma\nrqmlDwzD+GioRTUcIMsyTp48SbvdZmhoiPPOOw/nHFmW0Ww22bBhQ1mlPnPmDMPDwwwODjI3N8fo\n6Cjj4+PMzs4CMDY2RrvdBnzEWjwvyzLSNC3bQw3DMJZKrSLLVqvFu+++i4hw+vTpMiJ85513yk4d\nEaHdbpMkCTMzM4gIs7Oz5HlOHMdMT0+T5znr1q1jdnYWVWVycrKMUK0abhjGcqhVZFkYWXW7MEzw\nvebgjVXVT/MlIszNzdFsNhERnHMMDw9z+vRpRKSshi/UM24YhrFUpA7tdyIyA7zSbx3LYAyY7reI\nD4hp7h0rUfdK1AwfTvdPqeqWn3RSXarhr6jqNf0W8UERkadWmm7T3DtWou6VqBl6o7s21XDDMIw6\nY2ZpGIaxBOpiln/ZbwHLZCXqNs29YyXqXomaoQe6a9HBYxiGUXfqElkahmHUmr6bpYh8QUReEZGj\nIvJAv/UUiMhfi8iUiLxQKRsVkUMi8lp43BTKRUT+NHyGIyJydZ807xCRJ0XkJRF5UUR+c4XoHhSR\n74vIc0H374fyi0XkcND3sIg0QvlA2D8ajl/UD91BSyQiz4jIYytI8+si8ryIPCsiT4Wyut8jG0Xk\nWyLyPyLysohc33PNqtq3/0AE/AjYCTSA54A9/dRU0fYZ4GrghUrZHwIPhO0HgD8I2/uBf8Mv63cd\ncLhPmi8Arg7bI8CrwJ4VoFuA9WE7AQ4HPf8I3BXKvw78Wtj+deDrYfsu4OE+3if3A38PPBb2V4Lm\n14GxrrK63yN/C/xK2G4AG3utuS9/rMoFuB54vLL/IPBgPzV16buoyyxfAS4I2xfg80MB/gK4e6Hz\n+qz/EeBzK0k3MAz8ENiHTzKOu+8V4HHg+rAdh/OkD1q3A98FbgYeC1/OWmsO77+QWdb2HgE2AP/b\nfb16rbnf1fAJ4I3K/rFQVlfGVfWtsH0cGA/btfscoZq3Fx+l1V53qM4+C0wBh/A1jndVNV1AW6k7\nHH8P2NxbxQD8MfDbQDEzy2bqrxlAgX8XkadF5FdDWZ3vkYuBE8DfhCaPvxKRdfR
Yc7/NcsWi/ier\nlqkEIrIe+DbwW6r6fvVYXXWraqaqV+GjtWuBy/ss6ZyIyK3AlKo+3W8ty+AGVb0a+CLwGyLymerB\nGt4jMb5J7M9VdS9wCl/tLumF5n6b5ZvAjsr+9lBWVyZF5AKA8DgVymvzOUQkwRvl36nqP4Xi2usu\nUNV3gSfxVdiNIlIMya1qK3WH4xuAt3ss9dPA7SLyOvAP+Kr4n1BvzQCo6pvhcQr4Z/yPU53vkWPA\nMVU9HPa/hTfPnmrut1n+ALgk9CA28A3fj/ZZ07l4FLgvbN+HbxMsyu8NvXDXAe9Vqgc9Q0QEeAh4\nWVX/qHKo7rq3iMjGsD2Eb2d9GW+ad4bTunUXn+dO4IkQWfQMVX1QVber6kX4+/YJVT1IjTUDiMg6\nERkptoHPAy9Q43tEVY8Db4jIZaHo54CXeq65Hw3MXY20+/G9tj8Cfrffeiq6vgG8BbTxv2y/jG9j\n+i7wGvAdYDScK8Cfhc/wPHBNnzTfgK+KHAGeDf/3rwDdPwM8E3S/APxeKN8JfB84CnwTGAjlg2H/\naDi+s8/3yk10esNrrTnoey78f7H4zq2Ae+Qq4Klwj/wLsKnXmm0Ej2EYxhLodzXcMAxjRWBmaRiG\nsQTMLA3DMJaAmaVhGMYSMLM0DMNYAmaWhmEYS8DM0jAMYwmYWRqGYSyB/wexAX269vCdIgAAAABJ\nRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ruyvVnmCDBgj", - "colab_type": "text" - }, - "source": [ - "## Adjust HSV in YIQ\n", - "This operation changes color scale of a given RGB image to YIQ but here instead of choosing randomly, delta hue and saturation values are inputs form the user." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "vbCdwGtYChnQ", - "colab_type": "code", - "outputId": "9aa7cada-ffe4-4404-8a31-21427e9c18c4", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 269 - } - }, - "source": [ - "delta = 0.5\n", - "saturation = 0.3\n", - "value = 0.6\n", - "adj_hsvinyiq = tfa.image.adjust_hsv_in_yiq(google_img, delta, saturation, value)\n", - "_ = plt.imshow(adj_hsvinyiq)" - ], - "execution_count": 10, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvWmUJFd17/vb50RE1thDdUut1oAG\nkBCTQFgSs0ECjMQkmcEM14BtvPB6y2+t+9Z9a73r9fzhrfft3i/Pb11/8DPXxgw2IDBgMQgMiEGM\nEgIEmhAa0NCSWrJ6rq6qzIhz9vtwTkRGVVd3l7qrKjO7zk/KrszIyMyTmZH/2GfvffYWVSWRSCQS\nx8YMegCJRCIxCiSxTCQSiRWQxDKRSCRWQBLLRCKRWAFJLBOJRGIFJLFMJBKJFbAmYikiV4vIvSJy\nv4j81Vq8RiKRSKwnstp5liJigd8CbwR2AT8D3qeqd6/qCyUSicQ6shaW5RXA/ar6oKr2gM8C167B\n6yQSicS6ka3Bc54FPNq6vQt42bEeMDU1pdu2bVuDoSQSicSxeeSRR55W1dOOt99aiOWKEJGPAB8B\nmJmZ4a//+q8HNZREIrGB+Yu/+IuHV7LfWkzDHwPOad0+O25bhKp+VFUvU9XLpqam1mAYiUQisXqs\nhVj+DLhQRM4XkQJ4L/DlNXidRCKRWDdWfRquqpWI/K/AvwMW+Jiq3rXar5NIJBLryZr4LFX1RuDG\ntXjuRCKRGARpBU8ikUisgIFFwxOJZ0x7/YQMbBSJDUoSy8TocBSB1KPflUisGkksEyNDaoCSGCTJ\nZ5lIJBIrIFmWidFCtTXnlubq0oIwImlinlhdklgmRgup/8QrS+bmSSQTa0USy8QIoYtEUltKKUiK\n8iTWlCSWiaOykiizQpwaC7LkAbWUSbzV3NbWTi0DUWhNp1VRr4SbYZv6MAVX71GveOdR51HAWIMA\nYg1iDMZIf/y1tSmCSLQ+4/WW/B75rhXCmwr7tYbLUpN20f2L33jiFCGJZWJZahHUWgQhCFX/Kqp+\n0QO0+auYoER4pxgj/ecEPB4rgnqHd46qV/HEXQ/z8IMPMTc7j3rFGkOnKPDOYYzBWhueIIqpIDjv\nEQQRQdWHMREF10gUUKFyFdYaut0uXpWyqsAKWZFz0cXPZeZZpzO+ZQqTGWxmo7iGZ1OniDHUstpI\nfuvkEAReW+/Ro6oYST+vU4n0bSaWRSAIZX2jTSMOtVAFyxJVanvOO48Yg6MCsRgMvnT4yrHrzvt5\n8LcPsDC7QGEyrLEUeYYrSyazAjRYf+pBMIiCOodzHmMsRvqiperxnsaS7A9RcVUQ2sxYyqrCmowM\n6NgC75WMjEfueZDf/eZ+vPOUVYUKvPCySzjt2WeRjxUggjXhdURM8/5UNYyhOXuYlpAb+meYxKlC\nEsvEUZEl08nm5y/1lDOamlEo66m2IGAt6h30HE8/ups7b/0lswcPYcUw2Zkkt4ZifCIIoBicV1Sj\nQEbLznuPMQYREy218DraDEib6bT32pptG7yvsMY2+1sszjuMtVRVRZZlKIr3jswYFEsnzwHhd7+6\nj9/+8h56VZdu2ePc55zPi6+6HG8U4nS//cGEqbeP16T5FBKnFkksEytGAOcc1tpm6iki4INo1bNy\nVy2w9+En+cWPfk5WWYosoyBj68SWvrj6ICa2ERVPlhmItpuiiI2WI54Qv2mLEX1fJCCm7U3UIJTh\nRnQXKNZK63U8KFhjwrMZ8PGVBaUwhqIYZ6ozwaHd+7jp41+hW/U48/xn8fxXvJh8YixO2eMnEy1s\n1SD+iVOPJJaJo9MO1mjw2RkxuMphMwMahK1XdSlsxsO/uJ87f/FrJvIOmbFMSAe12sRMFFBRkPBc\nzdNrbU3qogg3MaVS+rP+k1zFs3iqXlvGIZKuKD78VYn3CmGTMp6PMVGMc+iJvdxyw/d4et/TvOKq\n13D6RWeTFRn1Owrvw1Pbs4lThySWiaOySFhEQgTae4w1VL2KhYNz/PQbP8Af7lHYDJMJY2qRCiQT\nvDpMZoL/Uuspq6Evi7UaEv2dJuwn/dh5f+p/4mMPD5f+1satoK2940DU0L9Xw7hMEHkN0RwyLDNT\nW3no9t9y38/uxqHM9g7zgssu4fyXXIQYG4NCiVOJJJYbmZa11ra5+vHmxalBoPjKMb9/lu99/uuM\nF2MhWJLnTUAmz4swJRbAEa3GaBqqNAGS8GzS0q8oLlJPhfvpOKtDbS3SUtIwpsUvo/0/IqiRMD3X\nMBqREIU3xlJWDkTIgZliml23P8Bdt93BC654Mee98DmYzMbPYrEVjfT9rs14Fn0Pyd85jCSx3MBo\nnWWo/ZCExuWEHo8hpN6ogqsqnrzvUX7y7R9w2uRWpjuTIZASHZUK0XUX04Q0pNxo7RCM9x9tGr1o\n+r0GYqFHe+WjRa1bqroow7IObi3KJxVUwFrLTD7Nrtvv56Hb72XqjK289MqXhah6tDQVjT5ejRH0\nflgM9eEbWZqwmhgKklhuaLTxRWodoGh8iSHQ4Z3jFzfdwmO/fZjTtmxj+9QMFR4rpsmzFDFHF6MN\nh
HM+iqgy0Rlj4ckDfPefv8YLXnkpO597DiaLP7dopeNrV0RwWVgs2liYiWEjOVY2NK3och3JDfnX\nUMHBp/Zz/d9+kkOP/Aczk9M4VyFGMCr9qbmEFJ9EPyJvraW30INKmSjGeODWu/ji3/4zD/z8bnzl\nohoaxNCYqKbOzdR00hlWkmW5oan9c9FvJ0L30Bzf+dzXGTM5FsuZW04HFvvtliaAt9N2NjwieFXE\nGrAa3BQinL71NB6/6yEe/vV9VK7iyve/lWKiCA/BxM8XwBNsmPSZDhvJstzAeMB5j1eHOsdT9z7G\nzdd/i6l8HFc6fJOz048W16GXxAqRGLzSkFta2JyxrODGj3+BB2+9B19VaPOpKkQ/cWL4SJblBsZG\nX2O50OWr//gFtm/aSsdaXOnJsgLR+ONVUPEhR7JJr0mCeTwEE4M5DmNCFB0HYiwzU5t55O4H+dWt\nt3PVO9/EpjO24h3YLAnlsJLEcgNTzs/zkxtvZm7PLDNTW2MEm1Ddx4eppMQARLCO6rXfKZxzPJow\nWdQ+7z1WLAp45+NnKmyb2sJt3/gRXVPxhndfg9gCg011OYeQNA0fdeIs2asP/jElLrvTZlu47UJq\nSix9Vi10+cY//RvMVkzkY016jHrBGIOxpvUS7czLdMishCbpHgCJGQPxlomrgwQUT0HGtB/n3z/z\nFbTXIyzFVFCP8y58xdoPwtH2jrTSQtMJbG1JluWIo3H1S10STfFN4Yk6hzJkhocfa7XQ5YaPXs85\np5/FpolpNFo5R3OT9de4pJ/iydL+DDX6g4Mr06NOGfcFP77+e8xV87z+/W8h63Qw8fsFGmt0OZIn\nee1JYjni9BOmtWXN9H82IuCqkNpz149/yWN3P8wZm07j8OwsWZY3YtvsnFhHBBOT0lUUK5bMW6aL\nSb79ya9y8eUv4ryXXBSDRM3iH3SZr6leNZpYO9KcasQJxSli2mOzvplG+BRBDHz1Y59j/4O72Tw9\nRUkVq+SEiuPGpIIPg0BUEK3zViErLI6KhYUFCmPZdccD7H3kSfB+iYUf8zHrC82diTUkieWIszih\np556hyCCorjS8Y1PfZlxn5EXBWVZYhAyG+pEipEmFzCxvqhoLNAB1li89zgJJeQyGyZ9t3/7Fm65\n8WbUOaRenw44Vw1y6BuSJJajTh3gwceq5WHJseAp57rc8P99hknpUGRjVN0K0TrhOXm5Bo/io2AC\nfSszTqidcxRZRm/vHF/6u09T9Uq896gSxDSd39aV5LMcdSRMtY3aEP3G4+YdX/roZ9k2uZltE1so\nspwFX6FGmwK9SSYHi8TKI3Vfn7oQCdCsqDISouBlr2TLxGa+9y83MrF1ipe//cpQmCMtIl9XkmU5\nAtSpIyEdKKaSeG0qc4tqKDerivHKN/7pi5y5dTuZWIw19Ho9DAZNS7iHh7iufvn7YqQ8KmieF2Qm\nI5eMcv8Cex/eHabl1MWL62pR6RS4liSxHAFadbuD6KnvT90kpIl7X0HluPHjN7Bt01bKbonJ2tWA\nkgkyTCjt7IXj79eeC/zyplu58we/pOp1wz5N++C1Gm0CkliOCKHlQTA0pBHPkHAe9hDn+fzffopp\n06G70CMvxuhV5QDHnDhx2olAdZ5sXzCLLGffo0/xg3/7DnjXPCrlwq4tSSxHAI25eHXb2VBVXOPa\nbo/rlnzt77/EGVu2U5ZdTKyz1rFFKsowgiz9zoRYK7je4JUxO8b+A/vD0tR6cxLLNSUFeEYBEUTD\n9Fuop9ZBOF235Juf+gpbN20C55Ei/KRCjclka4w2IVWoKmP/8ywL/mex7C0P8LYPvgMTO0yqxpqY\niTUjieUIUNeEDX5KH3KDvKdc6PKjT3+bLcUkvbIky4pgaZjYw3q5pR6JoWdpi43MZqGUHorp5Lz8\nD3+fselJ6rImXjUWdkvf91qSTkUjQh3IAUGdo+r2uOHvP4+xsV+OtVSuDD8ZraOk6ccziiz93rwq\nxViBWjATlnx8HKchr1YIuZka/dqJteO4YikiHxORp0Tkzta2GRH5lojcF/9ujdtFRP6HiNwvIr8W\nkZeu5eBPSXSZm7GntSH6KHslX/+HL7Fz5jQqV+GcxpU4rr+KhySYo0idd1nfArC5pfQVXal4xVuv\nxGYZiFC5Eu8VTP0tp+96LVmJZflx4Ool2/4KuElVLwRuircBrgEujJePAH+3OsPcAGgd99RGMOs8\nyjrfDgXfc9z4sS+ybXozVdUjszliQk/vzOYtCzQup9vAXktp/Td01KlfrfHVeZOVc1RlSWYMRafA\no8y5BV7/R2/GjIWVO1Ysmc0xNp4axaQamGvMccVSVW8G9i7ZfC3wiXj9E8B1re2f1MBPgS0isnO1\nBnsqo9IvrItoE+UU6bcZqLoLfOXvP8/2LTMslF2KvBMj34nlWGku40DQutBJP4+yrj5UZBlFp6Dy\nDhAWKHnTB67F5BYzjMK/QTjRAM8OVX0iXt8N7IjXzwIebe23K257giWIyEcI1iczMzMnOIxTiMag\njHUpJU6nvVLikNJz08e/ynSng5YOUUNVOSSF6EaS/nS7XoJT99mM6WExA+LSt7+KYmIsulk0SeUA\nOWmzRLWuLvuMH/dRVb1MVS+bmpo62WGMPHXl7H5lyuYOMoXvfvEbjHfGKLIODo/NbZhmuyG0mtaS\nRRXDdfnLEoZxdtp8w1Eo1SsiFnU+VlI3VJNEoTR4DUWdk19ycJyoWD5ZT6/j36fi9seAc1r7nR23\nJY5DHZiJyUFYTPiRq+OeH/0aPdgLPyZbF1DwID5EvjcIqu2Ff43Tom+RRyFRr7T9lO3E7eFCG8PS\nO4/3nqwoEGOZy3u89h1vakroidhlTwSJ9eNExfLLwIfi9Q8BN7S2fzBGxV8OHGhN1xPHQGJbiHaH\nB1c6Du7ex2O/fZiJzviR67xVNlQqZTtcI4tuy+K9YmHjo1maw4C0xB2E8fHxUGbIwpz0uOodV2My\n20zJqduEDKP/dYNwXI+XiHwGeB2wXUR2Af8X8N+Az4nIh4GHgT+Ku98IvBm4H5gD/nQNxnxK4tRh\nMaFyuRhKrdBexfe/+G2mxydCj29XkZlYx1DNkvIKpx5Lo9jGhg6TdcaAtoRQ4/4SQsN475vosEjI\nVRwqlpRXK6sKtcqhhVn+4IPXhffpHWKzaCmDJ6SGWUlBvUFwXLFU1fcd5a7XL7OvAn95soPamIRp\npSsrbJFhVfje577JlolpMMHpb61dJrI7ZCIA1Mv0wCw7XgEQQo4gtciFepzWhv7avbLHfNllbGKc\n087YwVkXnM2mM7ZhiyxUX6q7TxrT2FvqfWNVqvN45zm85yD7d+9h9yOP8eSTT1HkBROdcTJjqcoK\nayygeOeROrNAaiEOJ67+yPv/rsh3GBvmNG1x6Veyx0Imhl63R1bkeJTL3vwaJmamQ3Cn5WgVU6cW\npfYfgyTFUoeE2nVvshxQ7rz5NnKTIQgeN5yaeBTq9esqLl
b+7oumIGj0HRiBqqrodArmqnnmunPs\n2HEmz7/0YqZ3bCHv5IgxOB+jw+EJGnH0ofFQfypuTYwYG0wOGUIxOc6ms7Zz/hXPxzvFlSW+cuy6\n4wEe+M190OsxlnfIOzndbhcFjAjWZBgEF1fFtLMhl8rmUWmlB9G+JoBTenHNjTGGnnFMzkyf/Ief\nWDOSWA4NinNh2ffhPQd5+ndPYMnwXlGpsCYf9ABXjBIKfojPYgqUj3WSwsUYQYxQOsec6zK+eTOv\nfePryIoMtXEJX2znq6qY2nqsXbXxH2PC0s5QkSncZ5oEnL5lZowBEawVjC1AlfOveD7nX/F81Hme\nvPdRfn3b7eA9hbUURYeyV6KtHFYvPlb+iZahLJlHL8Ny6UHtcFRmM0xh6UrFle95E2JisZQN5Ice\nJZJYDgux9gVe+fon/43t01swmWCsULrR8lGJSqwpVldJMlHAggW9f+4gMzu28/KrXk0xNYaxNkR7\nib2E6gQqH4SyqeZ5xPLN1tJA7SdcCbJIm0LOYishS8DYGGCxhrNedAE7n38uVa/Hk/c+yi9/fBsz\nm7dS9Vzox462ipCHXt/1W1zZB0Kjl82/Nmw80J3lmj+5FudB1OM1+SSHlSSWw4IJK7+/+9l/Z+e2\nHTjvmvqE1o6Wr0qsodtdYHxsHBGYX1igMzHO03v3svX0bVzz/ncFsTIScgfrep1NjFhRDS18GwFs\nC4jq4r8x4EVLLFX0iCDK0ZY9Kh6TWXI7xtmXXsSOi8/lyXse5mc//AnbN23HVR6bxZH58KSmLdRH\nod+2NmQ2qPd0xsZx3iHWMOe6XP2ht4NYbB4r4euR404MB0kshwRfVtz8xW9h5x2lV8RKU59QxY9U\nubVeVdEZG6eqHPtnD3DxZS/kOZc/H5tZ2vmRtCw9iEEVqVNqomI00ZtaNFv71y+o7WWDUXiP0LH6\ns1yyWfuPqsW0GO9w9ksu5OxLL8T1ehx6aj8/vuFmtkxtwksV/JiqiCwXwOpT580KodCJMTmlr5BM\nuOytr6YzNY7JspDmBKFHkrRWbyWGimTvDwmz/7EPFsIqjaITggvhhzxaQgkw3RkD9exZ2M9b/uLd\nPOeKFwZRiOIXihjX4tdKHpe6YHG/P/Yi16BI+55+YCcmpxqReDHR9ou90THRLdCySOtLLHNWuw6U\nuDooBo5sXrDl7NN505+/jYVOj8O9OSyhEO/xhLLxECBYmyHWoEZx4hnbPBUSzuP679oN47Qa2tzQ\njU6yLNeROi+wFoP6B++95+av3MTWiS24qsJVURBWEERYK+ofe7ByaMatAkYNznnyPKPyoYamd4rN\nDN55Htq3mzdedzWbdmxFMosYE9+KxgBPyBE18TXqabOoOcrblcbIPMq9zb/LZvZI/0qIqC99Jmnd\nr6jEchXR36ko2ViHq97zZsr5Hjd95utszifJCL29S1cxPj5O1esR7UiUIHy5hPfv1GHEcsj1+IP3\nvC28mjGLxq6iWGyI8iWGjvStrCdLfqQiof2DuopNY9N41y/HNhS1cuKUsD0SUUEUOp2cbtlFxJBn\nOQLsPXCA0y8+mz/88/eweec2xJojRU6b9PHmNWorri5m216jQ+vWSsd8VMFd0WOkfZU6hh9ciYZ8\nosOb/uRtnHbhmWHabEK5NF+5RiTrfjkCVN4hRiiyDvvcLNd84NqQEiWy+IWbzyDVFRpWkmW5joTp\nZz9fz3tQV/Gdf/0GhZpFKTCDpp0b2BYtCAK60J2jyMcoq4rCGBzKm//8D7FFJ/rnBL9sC60heYPP\niPiNBbMUYzPOv/S5qCr3/+I3bJqYpuqVSGZi6w/FEERUDXinHM7necsf/yGYfiAqMVoksVxHDIJK\nndYiaOW45Vs/YKLMwkK2WFlmCGzKiGJ88PnVU3BilLnojOGd59DsLC+4+iWc9pydiGSICE5DKnd7\nqj2qeBQjEq3IOF1WxdiMZ7/shVx4xQt45Pb7+M1tdzKhY4116Qknx9zmXP6O15KPhzJrwbAexRNG\nIonlOhLy/hTvFWPDj+nw7oOM5TnEQMXwCGWgTqyuLSatQqR2dmGOBdfjmj+7jmw8R40NOYmqWNMq\nJdbyIQ7XO1sZ4eQFTdc4gBjdRqHXKzn7RRdwxkXP4t8/8W9s6kySFXlMMFfmqLBF0ZRZM2LwXpsl\njInRIfks1xnVkIfoy4pvfuYr5GTBbzmkUqKieONxWmGMRRWKooOdLHjLh99JMT2ByfJgNccoMrFh\nmrbzIGtGTSN8EMngUugvfQRAlaIo8BiKyXGu/tN3UEloSdvr9ph181z13quxucWrC8Lrh8fVknhm\nJLFcV+IPRZWyW1KUhjzPUT+cX0OYfodwk8FQ9SrGJifY293Ple+5BlvkeO9xlWu8mqFlLyDxvdbp\nOHXSz4ilQYkEi980aUj9ZPO6Fq9koeZkMd7hRa+5jIPdObqm4o3vf1vouuldvx5lTJ1KjB5pGr7q\nHCPdJ9asVKd8/dM3sK3YRFVVcUrXijjTXosSnnOt7M5FqStLXrXJC1chMxlqhL3z+3nTB94OeYhe\nG0BtvfwvjrxO9I4nhn5y+ZHrtocdjQmQS5uBhRNDmE6LV4wEq/v05+zkiulXs2lmM5qF921ZXLh3\nWGcRiWOTxHJVqZeqtX8Msujusqo4/NgjnLtjEwf2lmTZGFXZC1FSVfqurFBvqJ+Evfo/MGH5cdZW\noFPFmpBQffDwYV733j+gMzWJ2MWdBNtpQEcIYVtklrt/yJGlOY+t91B/BtZkzX0gzOw8/ZjPaVPj\npJFkOOd/I8sxMqcjWWaY3fM45zx7O8953ml0y0NNcVeRII+q0UqLllpY2bIGhPlynUlIyMKub4VA\nzVhnjNm5Od74J2+lMz0VppTJMkpsQJJYrjPiuhRWcK6H7ThedMX5qJ2nLs0QMnOkla8sK69u84wH\n087G1tafYDuVZY+yrLjyP11N1inwXinyApeW4yU2IEksVx1ZctHWBboH9oQiDGpDoVqd55KXXchC\n7yCKYkyGeh+qfdcVaHTtvqZ2CzDvPZnJyKxFVRgbH2P3gafJJ8dBDFkWqh8t9d8lEhuBJJZrTLuz\noGjF4/c9AIDzDmvCOmJXzfPil52P2l7wUkosXSaCiCJrZFouascKIYijYcVJ5Sp27/8P3v7n78I7\n8OpbUe0klomNRxLLNUZMrIGosOeh3zI53sFYQaxiJcNVdaWdLs970Q6e93tnsFAdQMUB9ZR8bXyW\njQZrbQOHfEJvPWZTwXX/y/vJOkUoF5esycQGJ4nlatN2/RGTDkWoegvM7n0aFRen17EJmclwzuOd\nItZgWeDSKy4AO9+KvNojbLlnVFziqEgTrAmr8ASxhj2z+3nV264MSTMSq2q2S00m3UxsQJJYrira\n9wFGf2PdgtXN7qGTW8RrnZtMaJbgsMZgrcV7pXQeYz3Pu+RZVH42rJyJ/WiIU/T6tQIno1z95mF1\nFPzwwmHe9qfvx
o51yLPYdjcmZse6OGkantiQJLFcVaKQ1NalhD7Wvix5/JGHjy0xGkswKDgvOFdx\nyWXns3kmZ76aj0UqQNUhoSdg9B6e+BRdxYNIDDQZvIcr/uA1ZJ1s7SLwicSIksRyFVGaZdFxtU4s\n1yU98traPAp1q1dp5ryespzn3ItO59kX7cDpYYxYvIMglHVdnxO38tR71ATJNWKYm19g6zmn4TQV\nekgklpLEcrWpe8W0dHHXPXdi9djCFir7hER0QTFisFlOVc4ztcnzoiueBVlY6aMqoLFV7EkM1RqL\nlg4RmOt2eeOfvw1EyGyWyoglEktIYrmKyJK/oIgvMb5qVcY+xoOXWp7R76kKVdXj4kt2cNoZHbxW\nWACv9KreCY831GUM0foLLr0olBaL25NUJhKLSWK5ykjjsgzd/7oH9yFaT5mPYQfq8oGbJqCiSq/s\ncsY5U5x/4Qxz5SFMFq3AE8R7T5YXHOzNc8FLL+pP7NdmKXoiMdIksVxNmiB43bdF2fv4o6HgqzqO\np0CL+tLEddrNf+rJbEFZ9hjfJLzklc+hV82SSXESw/XMzS9w9QevRYyl7v4SQk1JLROJNkksVxsl\ndgQMJSq0KjGyeCXP0R7YCFS7/U3EWoOqx1iLMeCreS552UUc6j4VWh7QXh7Zft6YSym+rsuBFYN6\nT5GPUXU8tuj38w6vq8lnmUgsIYnlalKnI2rIpcSXeFeG6W52kmW5dMkVUbSa4/JXXsxFLz6dbm8O\na2L/HukrZl1BEjUYVZyrcBrqfu+dP8gb3/MWxNaFO/p1LFPqUCKxmCSWa0IIkFQL8xR5jrEW7092\nyWJfyJxzGBHEgPc9kAVe+pqL6LlDKL7p/9IX2JhQrpDnQbTzIueVV/8+WZHX+Uqx+2QikViOJJar\nTG3PARx46km8c7jKkWc5x4yGr/CZ69U0dc/x0EfQg+vy4pddwPhUjHIb2yS5txZf4soKRXGVZ/PO\nGSrvCK1zFhfpHbHuD4nEmpPEcpWRuCJG1TF3cF9I+DaCO67PciVPXr9GWG0jJlQmMtbgqgpflVzw\n3B145qmq7qK4unoHxuA1dHTdd+AAtsixJsOYfrsHrdejp5qVicQikliuJm3jz1eIK+OG1p0n89S1\ngNUNbrSduxkKc+Adl1xxHtNbcsR4MBoDQ6EV6/j4OIrw0lddFh/Vag9Rt9Bp+y8TiQSQxHLVqQVr\nds/TFGJYHNo+CcGM4iVxDXkjk6oIJnyToqhxlN15nvXsbTzv93aST2icrguop/IVB+fneNZLnx2f\nt138o5XrmSzLRGIRqXPSKlNbavufeoIidgasG4CdFNr3O7LkGiwO5ki0ZL3vceHzTufgvh677j8M\nYhBrufy1V6Cmabu1xIiM24cwHN63ruPSURG8r7CSHeljXU7sl7OWN9J+J/pcEoKDIfXMh8LU0Reu\nsnFqUB3XshSRc0TkuyJyt4jcJSL/OW6fEZFvich98e/WuF1E5H+IyP0i8msReelav4mhQULOjQio\nKzE2WJb9ifI6HFbNce5BFa+OyWnDxOawPmd27hDbn71z0XjaZddak/K1H+szZckyTO8cRgzO+fqj\n71+W+2/pPhttvxN+rlYO8AatmNuMAAAgAElEQVRmJdPwCvjfVfX5wMuBvxSR5wN/BdykqhcCN8Xb\nANcAF8bLR4C/W/VRDytKXMLjERzqfeOvHNT5V70nyy3nX7ydc86dZkG7SJ5jsEOphysjDNwYg/cu\nrG9P/63hfxKrYsXjeIPq5nGn4ar6BPBEvH5IRO4BzgKuBV4Xd/sE8D3gv8btn9RQrfanIrJFRHbG\n59kQ9OYWsHhUDWG6WNuX63+UiQilq8itYXxzxpvfd01sEeHRZoHj6CFI8LcqHJqfZ2wsH9gJ6VRH\nPeQ2Q1Sb4ziuC9tQPCOfpYicB1wK3ALsaAngbmBHvH4W8GjrYbvitkViKSIfIViezMzMPMNhDykC\neM/Tux7FiKBewMhA3X/ehwO79J6uQDE5jauCNTaaAe/aXxmq0Bub8fmvf4XO9KZg1Nf+tmXe3HJv\nd7mv5lTd71j7HHlf+JyNCN3Ds/zJdW8F7zEmi/dq/TVsGFYsliIyBXwB+N9U9WC7HaqqqjzDFoSq\n+lHgowDnnnvuKXGSqs+65fxhxqyE2pPNYTiYtxhy2A0mE6Y7m3BiMVntfYnCM0IsDXCpKhVQxKZw\nssyex9q07AnjFN3vWPtI808d34m+97jAofKefLljZfQOoRNmRWIpIjlBKP9FVb8YNz9ZT69FZCfw\nVNz+GHBO6+Fnx20bAlHFaBkC1N4TCk9Cv7TG+hIOfE9ZOnacf1aweOMoRM1oHugS0qCMGLwPf3Wp\njbTSD3oj7Xe8fVr3h1oqgvcem+XRddMPCGrT2mTjtEZeSTRcgH8E7lHV/6d115eBD8XrHwJuaG3/\nYIyKvxw4sFH8leGQ8VjRsGLniGT09bcujTVgBO8Ek8VybupR9SMnlO2IfZNGVEfIRUH8MqHddDmR\nS9DGcN3YKIoiS4TxlJgQrpiVWJavAj4A3CEit8dt/yfw34DPiciHgYeBP4r33Qi8GbgfmAP+dFVH\nPNQo6j1iwBCWJIazkdT3rvuInKswIth8HMnHgdoSG3HqjpcmRGelWc1Uv7MROxMMGwKoNla7qYut\nENwdGIN6hxE70GGuJyuJhv+Qox95r19mfwX+8iTHNZKoKvP7D4Qpi7ba1kor8LDOiBjEGLacUcff\nzGJNSSRWQK+qGMuWeC032PkoLXdcRQRl9uCBYOks6s0wOGXyXvHOM7ZpE+3+34nEM2HP/oP9hT1C\nk7y+kQ6lJJariQaxNGbJxzrAiGFmgyUpNqe/Pj2ZlYmV0D+xPvn0niY6vjiqvnGOpSSWq4pixIOP\n3slF+RgDGpEqxghi82gNNPcMZkCJkeSpp58GiNkUg/G/D5oklquKYlB82z9ZC1QzBR7AqLyCZGyo\npLjEqrJn3754TRcV2thIxamSWK4mqhDLoYmYpgxBc98AjixBCH0l68JuYWsSzcQzYb7bbRkB9XG9\nsY6hJJariKJYPCbr99xRdKAtGhSl63xIMoaQYzm44SRGlPluD6hXhUuTd7mR3DlJLFeZkJ7mwwE1\n+GB4IPbs0SXbEomVkud5FMil92yc4ygV/11lRGSZiOEgkdCrJ1wldSJLnAh5USy7rHEjHU3JslxN\ntF984AgGpJ5iBGOyRTlyKX0o8UyxxuI1tnM+eqmiU5oklqtMs+huqWAOKGyoXsnyoqXVsrFCmIlV\nQYwNaUMb+NBJYrna1CsbhoZWutAGPtATJ0ft85bh8S+tO0ksV52gSMN0UGl72t0I5vCMLzECDNHx\nPCiSWK4yLhQCRGgFVQaIquJcf5mjx4ex6UZcg5E4UfRoqR0bSESTWK4RS32Wg5+aL80dSiROgHZs\ncJD9UgZAEstVps6zHJaIYSgq7vo3CNlDgxfvxKix0Y+YJJariQgai6SGZv
RLy1as/+EmYvBV2bx2\nI5KiG/7gTySeCUksVxGBUEFaQ7dl7+tgz+DG5L1i0VaOnER/ZZLKROKZkMRyVZEQTJHhiYaHvM+6\nf0q/wUUo7DHQoSVGiEFV+h8mkliuKrGyTxRKkWH4eBXBt28mEs8Y7/zxdzrFGYZf86mDsKhtw7BU\nlDa0LMp2QeJE4gTQ+jjX9W/tPEiSWK4qQukVW1eTVheCPs0imvU/tBTIrUW0CuXitC6yJRsu9SNx\noijd+TnqTneqPhw6AujGsTiTWK4qQlYUULcLbdZhD86SEwGvHvUlYEJVpCjaSSoTK8VVVQxcblyS\nWK4mIsxsPw3nfFP8tzkFD6wHT3jp8vDhWGOz7qCykQ/7xDOlk2cxSFjPSmCjnW6TWK4qwtj0NM47\njDGh901zz4BKtMXeP3P79jZFiVv3DmRMidGjU/QrVy0uJL1xjqEklquKkHXGsFmO962kdBmkHRfW\ngR/cuwei37KW7o1zmCdOlk1TU8EP31bKUIZogKNaX5JYriYiIBnd0mOM6U/FY1BlUPIkoozlGfhe\nk5y+sSZQiZPl3LPPDldqv+UGzLtMYrnaiOBj/x2bZU01yTU/tJbTYQHnHN4pVVnSO3Qw6Dkymj5L\naV9tN0Efxcrvy413WN+DsG1m6zKupGEd79qQxHK1EUGKThBN72MhCwPi1zhVRxc9v0rsNmkEsRZr\nYM+uR8BpLNQ2knIZCHlZgKA+FJ5rrQVo7zS0lyZDUeqAm2CGqoj94hPQ9q2bwGn8kLX/YQ/PgNec\nJJarjIhhy8w2TLvq0BrOvgVBVEAtqA21KiX8GI0XvHiMghqlLHsYqRDAqBm9A12B1qooVYexAtjY\nlr1f5lilfxlGtP7pKUEw1eMVjBnO76ST55j6oJahUvV1I3V3XGUUw9TWrezZ83j8EUhfLNfg+FIU\njATrCgkGbLRbFINF8KrglU4nx/XmMJ0MwQ7N+vUVI4po7FlNKIUnYrAimCiiy64pGcq3aaBdzsQI\nXgUxoC58l8OCoDjvya090q00asfQSZDEcg3IxibpeaUwAhp+FGs3542Wpfi42lJwRlD1GPEgFqOe\neQM/vO92/AN38q63/DHj1rLmZu8qE4rQx+m3gjE5AvzxdW8GRqtGZxD8MF6PYhC8em65414e2b27\n5SQZzHsKn2+soOU9mbH90Wgtl9JfybMBSGK5yngN1k+FJVe3eCn2mghmTAbSYFl5wBqD67mQRGw9\n1UTO9351G8XUOL7nKH2Pju2EqfioHeja/FOrJ7nY2MpjdPDqMdi4PCAIZ4nyyCO70CJn0METiT53\nYw1lt4cVs+HXfSWf5SpjjCBi6ExujlNDv7b+HQEVH0MGggq4XhV6hRvDPunxg9/cweSWTag6JqYm\n+P5PvhumUqOkLsARlrAECxqaOMnIXOrVVP0OxYJBmC97q/JJrQYighHBVVWcHPnWwh0dvRPtSZIs\ny1VE0cbaOW3nmex78ECIguvaLnds12M3KKrgc8Ouub385rFHkbGc3INTz/zCAgfnD1H6HoXtMGpH\nfD09FQHvHR7FRsvyeB/xcu90ucesx36NcdzcVuZ7JZ3xzlGeZb0JJ6aqclibLVutagPNwIFkWa4y\n/dJspujQcyG40qynPYlnDRWxDMLi9bmiIX3GxCCPiuLGM+5++jHuemIXdnws+p4cuS2w1jA5Pck9\n99/ZjyD7eFEfLzqcxV6lft8hwT8Ed/qBKmldFj1smW1L7zvaY9dsP6kDf2EJqvOOBx97IlbXP96j\n14AlaW1iJAZ1DGeesSO4dCQU+wvX22vENwZJLFeRcPI1KILNc7ztYCQ6ybXZ4xlTz94E3+RP+nrq\nLUpWFFRVSZ7nLBjLTfffyd7ePEVuQ3S4FV8SIzgct9/9y1BCDsVRL82Ml/hjGDaaE0R9+xjjHIDc\nPGPC2E1I5TKGW355O8bawQxGpRXl9qhXrM0oy5JXX/aiMN5632WszI3AccVSRMZE5FYR+ZWI3CUi\n/3fcfr6I3CIi94vI9SJSxO2dePv+eP95a/sWhgutk6UxnHbW2WidOnRSx1ZIGNSYFCQS8iQFjxdB\nvZIVOb/b9x/88OG7yYqcSivqBJTwI4g5iNGC7EwWlFpReRcLI7QUVf1QTARPdWrrvf6sO2PjA62u\nv+h87sF7T1mV5CZ562BllmUXuEpVXwy8BLhaRF4O/Hfgb1T1OcA+4MNx/w8D++L2v4n7bRjqFBDE\nMrZlU3/6fRJiGSedQRxp+7lCWpAWll89/hC/OfgUJs8RIyEsLnZJPlwQViMGmwnf+8lNQXiDp3Mo\nrclTl+b0BUDpHFmeoQMryNxK2RBwLqw+6/V65JLEElYglhqYjTfzeFHgKuBf4/ZPANfF69fG28T7\nXy8b6Vcoiml68Nggbk3U88R+COEn5anXdFdeMVmGV0dpDbc9dC9PVHPYPEdiAUsViauIFkcSrDGI\ngisdu/c+Tq9aoA6N1CksdXOzxBrS/lpEeOLpvSE7wfcFdBADqrXaGENmDafPzAxoPMPHimx+EbEi\ncjvwFPAt4AFgv6pWcZddwFnx+lnAowDx/gPAttUc9FDTOAcFxFB5DStsTiZgEoPpdQ6njcEcKXJ+\n+LvfcABHJ8vQGB0WBdOftLesy7piTLBUTSbc+O2v0HO9Roi1ZWAk1g4fg2mCULqSH9x6G95rXBAz\niFNV3zNO9IWr81zx4hemYyGyIrFUVaeqLwHOBq4ALj7ZFxaRj4jIbSJy2+zs7PEfMCJobUR7RcWy\nadtpiJiTS7XUOovSYBC66vjF7t9x82P34jOhLptlCCsqvISLiqISrcWokioubAfyLKMrC/zoFz/A\n+SpM9DUEjlJ/nrVFxCBicN7z0GO7cZlFJK7tH8iAwok4rI4SMMqhgwfYObM1aWXkGX0zqrof+C7w\nCmCLSOPMOBt4LF5/DDgHIN6/GdizzHN9VFUvU9XLpqamTnD4w0Y9YdYgNgqbdu4EMVhrOSKppOWd\nCAZFPwXGWtu0hJA6wm6Fw9ZzyyP3safsBkvTmNhnp84/lP5QYLHPVAXiSh9FMSrY3PDI7t/htUK9\nJ/oNgiXcmhEuzatOHI/WZ4iGoFmTluXx3jW+4u//7LYQBVfPwNp/qeDjCTLE6A3eVYtSszY6K4mG\nnyYiW+L1ceCNwD0E0XxX3O1DwA3x+pfjbeL939GhTNpbA+KZOViCYSmh2A6zc/M4X8emW5Kz6GOp\nrT+DilA5BxI7RWpwtj+lPX78wN1okSM2rNaRVhOpvpAtVrh6SR2t/SAkqav3TE5O8K9fvx4vHmvr\nosUnH5hK9OnLUPhrTVjq+OTe/eRjY0B/rfigEOkfQQK87Pd+L7qT0kEAK7MsdwLfFZFfAz8DvqWq\nXwX+K/BfROR+gk/yH+P+/whsi9v/C/BXqz/sISXOXtsdFDEFp519DtZkHH+NCYTlkWBEsMZQqkMM\n9MYyfr3rITSzIWJaa6058Wmb4
sFBWfaQHO5+8E6CQ7OO6J/wUyfopyMuFsqAiuDU8dWbvktms8a/\nPGjbPUxAlF6vywufc2448W8QW+d4HDcnQFV/DVy6zPYHCf7LpdsXgHevyuhGEQHBNhafU8/0jrM4\nuHs3hZUj9q1/F15C0jkQZ21BsEyR8Xh3lnsefgRnDXknj0VYwRtOqm2zsRaDodvrolrx8ztv5bnn\nXUwnG0PTeoWTRKJQ1me12sUigKd0jgcef5yprVtxLrQhCfcH7/N6E2Yp/ePTlT2yuIw0HGTpzJl+\nEatK/GGoorHikApgx5jYug2H4r2PPiu/yICw8Zg00fGP8RwwFd998C7ueeoxTJFjMotT8DG1J3Mn\ndwCrKs47CltQ5DmdsYwv3/QlKl/R63YbG8d5t9TbmlgBWh8LEE+MSijIBgfn57j1jnuC7ziu2lEZ\noCgpGAl+dVHlNVdcjtE64S1945DEck0Iy/JCHcvgxIet55yDx2BMOCDtkmVtdcGCsgypRvNG+emj\n92KmxkB9mKQpzZRINaR3nOwXKDFNBKAsS7rVPHc9+Gti+cIQ9EmzsBNC2tZkTAtSoFLP9Td8DS8G\nI0JZ9qinGYNsmVynji0cnuOCs84IJ3Xnk88yksRyVZGYOlQveQwfsIgBO4FDwtS33fkxYm1G6R35\nRMFjh/bxg/vvJis6uF4Pm+c4EQyhSKxBY/qPcFILE+s4kISfaZbnFEXO7Xf9nAXfDf4qY45cr5zE\ncwW0fI+tIIkn5FhuO2MnmbVUlSPPMmqhHORHKwjeO3IjZNgmYUNPxtdzCpHEcg1oci0RvHOxu4Sw\n8/zz6ZXlMmfqcNsUlrufepg79+zCjI1hvMFgm6k9hElcPQ0PhTVO/KxvaqtSCZF3Y/AaouNf+Npn\n6bku3letR9T7nvBLbmi8D26PL33jOyyUJXhPXnSwdZBugAVGva+oXEWe57zy8suoi5YYc3IVs04l\nkliuMo1fL1bEMVmoBWjEkE9vp/LgK48XyHMblkKKoZcZfvbQfexaOIQUOdYIWI8awfu+VQI0fjCN\n/qUTxdfiF/+qB/VK6Uo6Ezmf/donKX2vv1QziqpXP+ig7fCjQsjAknDCEUGNcMd9DzJbVeFEBbiq\nwvnBf5DqY6k4p5x/5g6AMIdRUp5lJInlOqKS8axLXorNDZkYKuep1POUn+Pmx+7hgHHkkmO9Ao46\nZxNx6zPAWunjbzcvLF/85vWUrhcKCqM4Da6AFOk5NioKJpQ6M1i8Or5003e456FHKDpjgx7eEVib\nYzKhd/hQqPAvNCfJjZImfTySWK4jlTrUjlP2yhjVzvjN/t386okHcZWjkAx1ob2rFwG11Enq606c\nbjst+fJNX6DnFjBxuWV9dzIuj436UCdUBW69+14Ozfeo3HD6/7zz9ErHeWfuCMtj0daaifQtQxLL\ndSU3GWJg1gk9C99/4C72VgtoZshshuJiS1SDqMWoYvDr7/YPfgRELHmeUeoCX/rm5ym1rNdfxggu\nJMvjSLwPvWqssQjwwBOP8+Cjj5HlBdbaITLI+2u/jLEYhVdd/tImLg/ErI7hGfEgSWK5jtSFgS94\n6WXcfO+vMdMFXhXjYqMx0bA+VyCspAmVzAfn+JfYNVLJOsJnv/Iper6LqifPcprF6yTbA2hOGk3V\neQkrsL7/05+HU15cTihD0ylOWmM2VPOzZMbSik8mmWyRxHIdUZTSeTwWsWNUlQMLuc0RFYzG9eGq\noWOjaLQrB1eJpl6qNzc/Rz5m+OQXP8ZCNY/zrr+ks+0m2MCqWbePlbi2v+t6XP/1bzI+ORljY0Es\nB1fg90hEBDEG7yve8vorg5slzh5q/3UK8ASSWK4jBiiswUrBe97+n5g/NIeRAtfksbXSmNUg3kYf\n4fr7uUISvMYcTEOW5SierTPTfOmmz1FpifMVzrtmSt4u6NFcGXXLc3FNksXbtV+ipE4qECM49dzz\n0MNc/83vYLNOaN3RZN2auFJnUOiS64pzDuNLTt+6hXrRJbE6UthtZL+9VSWJ5TrSVKW0htwUvOqK\n11KVvVi0IvzsfOMrUrwJ5d4GcV7vC0BYoldPLUtXIQKf/vLHeXzPIyCe3GZNkn1/LXTwZTb1jupl\nfyP2u2uksFVircZ512z3zuO9UnnHTbfcym333IdIRqUa+8f3v0XRwfzs6vqmdSqDEnJrUc91b7gK\nKyb4KIXYM9w2/utEEst1pWnZquFHeN5Zz2Z23xxVVYWljNTJ5vRFRYZTX8bGO3z/lu/y9e99lUrL\n0Eoj9kyXluA3GaGxnNyoFRVufSM0VYTiSUBMsBR9FXIUMcpnv3YjT+w5iHfVMZ51MITlsoZYtBQj\n4LynmjtMp8j7DdSSJbksSSzXkX43v1ip3BS89x3vJ7M57QTH8POUvq4M4YndWKHo5Cwwxye++DEW\n3HxjF0cp6S/2qeuL0C9SPCp4ojWpArVPub8kAICy8jhxfPk734d8krGJDjYfUEvbFRDKripl6XCV\n473XviVUGILh7Rk/BCSxXEc0TqlDO1uDIEzk05gqa/m+2oV6h9SsJHT/U/W4smTTzBSf/8an6bku\nlQsrfkJdzzCtC6uZgnCaYVT+Y7BoFiqLT2feKc73qMw8//Nzn+FgVWEEugsLDOUZrk4Kistns7yg\nmp8jpx8BF5GmXFxiManH5TpSr7fVGByoXCii8K43v4d//dZnyApL5Sp6ZY/C5s0UUIfQ2RfaIESX\nQuWwFr5285eYPXiYV17+Gp51xvkUpghmjCzOFB1GGTka6hUxsQmxD10P1XuceH77yN386Nbvs2Xb\nJk4/vaDX24eXGUQsdpEvBYbiXauABXWh7bGrKj70zreHVreaXJPHI51C1hERqedAIEKWZXjnAEPH\njIETqsoxPjbWWiEzrOtkQsm4YG2FObb3jrHpgtvuvIVPfeGfONjdD9LvdO7UMcAuMyeEWIOnDtKE\nCHHPL/DdW77JHQ/8ki3bNuGc0usuYOQwVvcBJbQCKUMhlBDG5OOsxkB3dpZMwgl6aFI/h5gklutK\nX/S8OtQ7jDEYY7j6yrdycP8hOnmHhYXu4ocN4YEsdVGNmA/q1CNGqLqOquyxaWaSG771BX529y10\nfQ8IDdLED+GbOQaidcBK6WmPn97xQz77tX/m6f27wwlClcpXmMyGb1cOY+UgaPc4zzwAxIMXxITa\npW+68tXxnfmUHrQC0jR8XZH4v2A0nN3DVsN4NsmbXv9mfnDLd6hb0oqGNKM60XnYCN6BMK48zynL\nCmstJhOcryg6OQ/uuo877vkVL3reJVxy0UvIbQ5aNL5Z1dBvqI5vaStwUr/nft7m4nzUY/28609L\nW/+2SkMsfh9Nek98fTFNxoLXkEv609t/xIOPPkBnoqCYCIsIokEdlqp6jeNSYAErDsXhmWzcLq2E\ngeNwMt/10aOCRgyVerT0LBw8xFnbt6EiIcA4hMfXsJHEckC0xU8ImnP65jMosjFy4+hVPc
(remaining base64 PNG data omitted)", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "fdbCDYJkG8Gv", - "colab_type": "text" - }, - "source": [ - "## Dense Image Warp\n", - "This operation is for non-linear warp of any image specified by the flow field of the offset vector (here used random values for example). " - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "dG557eQDDtSK", - "colab_type": "code", - "outputId": "9a5fa541-d465-435a-9b47-edded4a2811a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 269 - } - }, - "source": [ - "input_img = tf.image.convert_image_dtype(tf.expand_dims(google_img, 0), tf.dtypes.float32)\n", - "\n", - "flow_shape = [1, input_img.shape[1], input_img.shape[2], 2]\n", - "init_flows = np.float32(np.random.normal(size=flow_shape) * 2.0)\n", - "dense_img_warp = tfa.image.dense_image_warp(input_img, init_flows)\n", - "dense_img_warp = tf.squeeze(dense_img_warp, 0)\n", - "_ = plt.imshow(dense_img_warp)" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAD8CAYAAAD+D4bnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXm8ZFV1779r731O1R167qahgQZE\nQsQhDgioIAgyOibqM4l5mmjEzL5MT42axDGaxORjZk1MgnlP4zMxalDmURxQUEGZZG5oaJqe71R1\nzt5rvT/2qdu3m4ZuoYd7m/P9fLpv1TlVp85Q51dr7zWJmdHS0tLS8ti4fb0DLS0tLXOBVixbWlpa\ndoFWLFtaWlp2gVYsW1paWnaBVixbWlpadoFWLFtaWlp2gT0iliJylojcJiJ3iMg798RntLS0tOxN\nZHfHWYqIB34EnA7cD3wH+Dkzu3m3flBLS0vLXmRPWJbHAXeY2V1mVgH/DrxqD3xOS0tLy14j7IFt\nHgzcN+P5/cDxj/WGpUuX2uGHH74HdqWlpaXlsbn++uvXmdmynb1uT4jlLiEi5wLnAqxcuZLrrrtu\nX+1KS0vLkxgRuXdXXrcnhuGrgUNnPD+kWbYNZvZJMzvWzI5dtmynot7S0tKyT9kTYvkd4CgROUJE\nSuBngS/vgc9paWlp2Wvs9mG4mUUR+Q3gIsAD/2xmN+3uz2lpaWnZm+yROUsz+yrw1T2x7ZaWlpZ9\nQZvB0zI3sEd53NKyl2jFsmVuIGSRNDDJi1rNbNmb7LPQoZaWH5eBSDZ/pv9CFk6hpWXP0VqWLXMG\nsWnjctqqbEfnLXuLVixb5gSKoWIzRNK2WpNqmGprWbbsUVqxbJkTuEYKBcAMp1sF0wREhLb5Xsue\npBXLlj3Ko8rXj6lrimFI9vMM5i4NMMvD8xlCuY1obi+ge0hQjbRHttsye2jFsuURPB45mTmPCGCW\nxUO2f1V89K0P1sQNDz1inZIzHAQQzYK56q8/lofgjaf8hrf+7LZCWfdBmj2IETauh/7U1vXjm5sP\n1h3sxdaH00K4zaptxVHw2x1Ma+Xub7Ri2fIIBKalwAb/bW+gARVGf/Aeayy9wXPxqMXmbYZZxDBU\nwCw+Qkym5x97k4Rul5tf/Hyo+tPrA8Idpx/Pna84hYfPOB5JieEv/T9EExtOeR5brvgqh9x5K5sv\nvxCZGuf+04+HopPfvOpuVr/tDflxdxhSnT9xdEHeP3Ez9kOwtPXo1fqoedKMkCXMsjhajRFh+jgB\ntcEJaD1O+xlt6FDLNgzub88OwnEawRADEcWjQCKZx3AEc0QML5E6QWkFMRgb/+y9LHv7B4llH2cF\n3z/9xTz74q/zjZecgC0d4eD1Gznife+Dpz+PG37r7Rx4/60c8/F/4OtvfD0/sfE+BOjXyqgvkN4U\nCz/xbzx05gvyPq19iL5Eyg+9G3GOu/7kDzkEY6zRuwdOPwE04kLF6teeSWGejVbRO/QpjN67inlF\nYLyuOeKq6xCrECnBe0wNYQrE46igTkg1CcNLUJdQAoHQnJCIki0PEQGNYIHtjc2Wuc1ur5T+eDj2\n2GOtLdE2uzCyKEaBMBiONkKJKAqYKZ5AkgpHmS1GDEckmeK0oPY15dgmrNNB0kJ+cM6zeOYVNwKB\nqR99j/7b3kZysKa/madd+QPWfuZfkPM+hY8KIWFWYmaYzx9uDnxiegZTYkKDxzC8Uywa0YwQPJUL\nWEp4J1Qo87SgVmPMVcyzgFNYetHX+f4Zx7HSCUN//nEmfu/3wI0Tf+fDHPTSU6Hs8J2TnsPzvnZD\ntiYlIs10goZAlkhDzIHUQEK0w5SbpGsj07MALbMXEbnezI7d6etasWx5BDMsyGl2MKoUEoZHVDGR\nxgydwNIIjoiZod5z32+8kfTd61g0vJB70jhHsICen4Si4KCvfIuHT38BNR58DSKUdY1zgqpHqcE6\n4MBpibo+4h2mlj+vmbP0ZggJlQIlEUUogYTgzZhyjm4KeImoevCKWiI8/TlsuPkGRiRRTXmGy8T4\nMc9n/iEHc9+1lzO8eR1LXvFabv38f/Cir1+HWIGREAEVwU3W0C0wMcQCSATz1C5RUGBm2dpsmbW0\nYtnyhBlYl9MIWCNODsXMgTQ+6rFxLCZkKGJ+hI1nvJjohf5ppzN8ycUoHqTHQxZZYUNMBKVjnjJ5\nesUkdepQOgUMbw7UUQdFG51xRHwKoCGbltDMe0q2MtUQl+ceTRRp1F5EsKQgIASiGE4ipAJ1CZ8i\nUTzeZS9R9JEiCuaUG+YfzuJ6nOVTY+hxxzP/mq9zRx15zpXfQcqAS/mkJO9wkxNYdxgxaaZAmx+P\nwfi8Zdayq2LZXsaWHZAVUpr/kjQxjWa
ghpgRTZFVd6OpT9Q+43fdwLWvO5vvnft2Np11MvcefjAL\n/uXTDF1yKbVPIEqwksVuHg5PIZB8n8lyikIdhU8IFYZQOaMqIl5hKJYUSQgpUAUFF7eauI01KyLg\nXV6sCTGHNZawmZGcbxwuhhu4rkRRJziGcBYhRVRAzOF0mOg8z9i8lkPGN9PRSPG1qxEnrBzqMvat\nb/LDN78BW/swt33sfRiKDI3gpCbF7FyvbCJ/zi7eYbPAZmnZCa1l2bJDBvkxYqC3fh952rOxGLPl\nNLEJG1nMla96IYfUnsXWZ8Or/wfhy5/liC9cwbpXvAS1gjr0cFaAS3RiQXQRsw6BmujAWRY4J33E\nHKJdzPVJIog5VBzBKqzxlChuuqCGbL+vg1zI6RVCtjohqCOJYs0QWV3Ca0mwiFpJDJM4E4yA00B0\nhrk+nZjnJCtvKIZziqukmSKAWqbohiFcrElSsv4nf4qjP/ohHvjMf7D4zW9gaNLB/Hk7OLnGo01m\n9oHObrmCLbtKa1m2PCZxRpxgDvhunmsFJDAjUpME1n3za6DGD848Adm4nrHvfx/qSQ6c6LPijz7C\nWAXzv/QZRqNj3StfCnisSBTWIeAwMfq+AhyehIpDpQkjEjDtZhGTiBIQkzyUpiZRYDis+aoOpgW2\nnR0YZPc4sCy0YjKtn0kUT8JpwCEEDZgkkjjMRcoUcFriTEjiKBQ6MZAIBAXUU6oQezXqCsSBc0Ip\nw5g5+hJQhaW33cjEDT9ixU+/nu5UFsptY0+bPRchEreGZTXrzIzOdLBVy2yjtSyfzDTebk/K4S4S\nMEuICzOCCkFVsXt+hCsK1rztzRS9SUwE5xxR+9y6uc/TF2YLalBJDbIIO4TaV4TkUUl4K3GkPIe5\nzb443CD/WyIhlSSn7IjtnU9N0iOCm5Hnsy1iHpMmUL7ZgD3K+gHOFCXkKPhtCEDEzHF/nOLQoiSp\nseyiS9n4srMo3vQrxH/9e6JXqpPP4JB3/CHmOogquD4inWa6tUniTFB5KK1udqbY4XG37Blay7Jl\np0SJzSMjuQ5mhhvbAgZ9GYScG9e/8On4Q49Cyg5UES2HUAFNiqOYFsq8JZAmS8chJIkUqST6LJTA\nI4US8hxio4BiflooVR75Y779IkGyVQk7FEpgGyG07YRy+/UDVNy0UMZtMnbyeRNRDi06gOCdY+M5\nZ0JK1P/696gZUSNjX/86685+Pnb7D8E5hA4a+6j2MRLJFLNxypTPWJJBhGvLbKMNSn8SE0wwqaen\n+lQ8Mm8eMrWJQuZhJNzYQxw5r8v6lz0f1YLgFalrjPJRC0ha2LrCW/6KFanchT1KbE1qzDh7lA/Z\ny4TtBD7br9uKmmoWVm9GUmXi5W9gdNEClr7uNYzf+D3mrTgMGyowV+CjYV7zOXcdoMbM40hECe2N\nOQtpr8mTGENRM6QuSWtvp1hxGGs/+F6WvuNPWPvKk3hgwQEctGk1wbqoeZxNYClizk+LmJntxljC\nuZPysr1Qbl1hKHmKYuF/f44qGOs+fR5OhdXDNahn4vknctT7P8pDf/sxDnzrr5EKj9SBVFYUVuQp\nY6/T1nLL7KC9Gk9iJCkuleiNl1Gu20ItJWNLl7H2ZS9gy1TFAZvX4KzGmYHUmPMYC8CGyANeRUTa\noOsGEdlG4GJIhFRgKM7VDE0EQuwx/9sXcc+Zz2TZq3+GyRu+jdMSKRwhlcTaEby0QjkLaS3L/ZHH\nCoSeEV5jGOvOOp47iyGK0YIlBzyF0VuuJ1hBx/UY6Zckl4fP3ox+UDp1zBsQm/ZQtzwSAUJyYNkT\nj5VUrkJFsTjEUNnn4Tf8NMsuuhKcIhNriZ0DEGeo1TjZOm2x05YZbU+NvUL7bZ/jGFA3Dofp2jcz\nGnrlfG0w6zVL8rzaD098Nhsu+AKalKf0J1n00BZGbv0eqQvmEsOhxFwO9xEMZ8ZQHYjeYAfOkJZt\ncUrORBKl9jXRaoJXfHSU1PQTlK5gwzkvpX/S8aCKly041mLWQ218elv5cjYOr5mOphkN3HZQGKpl\nN9OK5RzHrHGiaOOwoSaJkWwQli2Y9REp6MceloDxzTzt6u8y9Vd/g/eOm11gXmHUzpDJJmbRHEmk\n8VLDwPHiTGnNmJ2TXFOIxBteO+AFM4dzHmeOBVVJrCFZYrI0Nr32daAdZGqC+rtXoIyg1kS/WpqO\nyXSDed3GmjQZXOeWPU0rlnMcAZxB7SqUGlGfi0qIYqa59qKCmaMjgbhpAw+87mU4hK4lxn/xjTyD\nhBkUKhReqN0g3XFGrGUTRB60vS13leSgSI+8xaKDXgAKcNJBXMFYP/LgOSey8Wf+J93RlWy+/ALc\nj27BW6Ip/sZ0wNMj4p7akfjeoA1Kn8PkoVfEEUAjlfMUFqnFUwD0e0hIkHIsoPrA7W96DcMP3Uc9\nZYwUIKYICXwHVcW59vdzT5Lz1nO4vgl0UsVEYRTRiMGhAl3ms/S/vsg3TzuNp44Gll78LZA8EI/1\nFGUxjKI5utRk2yD9VjF/bNqg9CcBQr5JFMNcU4xXi1wsIjkoh2HzFESlf+f3WH/6CcxfvRoxGC5y\nyI+JA58D0r2fO6E7cxVrgtwFBSrGS+gmoVTHUN9RJsfqJUtY+8rTOWq0JJ36slz2jYQgFMUwZnVj\nZxp1U3i42WjLHqQVyznKVkMi5zsncrqgKCQCZjWsXwPFCNZRkh8laMWICs4ZHZTSgXNuum/NbBhl\n7O+kJsxKBZwFhqucz94roddR+t5YvP4enARqVZa/8z3IVJ/ed78OgGzZAIBpwjQh1t7Ee4s2dGiO\nImTnTpJ8ERNZOPtBGYqCxQ1oVfH9N7+CkWcezaJrr0NclxCVMirJBYQIuDZOci/imx8kxaFOKZPm\na5hyznohQnRGTH0KX7LutOOoknHwhVdw/+nP5ZCLrkUnejA6iqUa5/JoYODoaecu9xztj9IcQK2p\nSjOoSGNNTUdRvPXzMCz1iMBwLay/8HPUq9dwyxtehm5YT3n9N5AYQRLjpSA6hFlAKWhvrX1DMCUo\n9ELBROGJUqISqJ3h1WPeQ1KSh65T7n/V6azrKwZc+/ITcRoJTSV2YGtb4H16VPs3rVjOAZwAxGnv\ntBGwxsMNglPAdfG6mX4Q+Ms/oXPAMg6QIQ7vdBitO4jPlWy8Keaa2o4t+5RsCRqd6JuQLMOZR0wI\n6qi94tVhrkAjHFiWPHTGi3jhhVcidYV6w4mAaS4SJdBGW+45WrGcI+Rhdw2aSEDfK6IQ772t6fsi\npG9dz90fehfihrjhzW9ErEaLRmJ1azyeSp2F19rLvy8xwBs4y/nkgzYeyVkuZ6fZjRMFOo3JOPzm\nN/HAK87i7r/4CDeeeTL1vbfkTCoDU5nu196y+2nvllmOoTk/RwJCkes+Si6Hm9wkD116DdSOLed9\nnBs+/McsuOoLWIIDH54CE1zdWBozrrQ0cXuPrNPYsi8wMTw2PW5wOignKiSXy8d5aiarmol/+ReG
\nxBG/cgkHKjx0zbWDrVA5RaR1Q+wp2jM7y5FcPhczATzRJwIBZ47EEAe/+c1YAv30P3FwMcxwWMAG\nqRitA5PRMfyoW26Ha7MRaRqdFcmRnGUh7VVIUTDUabKoVHG/eS4Hve6NUI+TBBLtD9+eprUsZzk1\nhqdARahFCYTs7dEanyIyOcHmM59DccrrKbRmCzVBBSUy7F3TrrZlztBcrNorYgkxQzslokZHoXz3\nu7kjFTzlla/BbArtLMoN08idMoFBbeKW3UwrlrOcYlD81oQyJaaiokDtStac80LGX3UyFh311ecj\ndQenAUGyVQLTzb5a5ghCnmpRITqHN0OtS4xgvsuS5z6PKlRsvvIyhHlUKN5AUXBGMiU3DmrZ3bTD\n8NnEoLTazGC5BP1gdMTR91BaH5ccP3rxcSzudpjQEnG5UGzdFcqeEMs+ZSqb9gyxDSeZZWwfC+nM\noTPmj5PLKahBS/BCN9Zot0SA+3/2F3jBV69FXW7lWyJUAh1V1KXcMdPk0ZpHtjwBdmpZisg/i8ha\nEfnhjGWLReQSEbm9+buoWS4i8lcicoeI3Cgiz92TO7/fMTOquCmtRoCO5tjK0gKOIW5+y1uZ3xFW\nLz0UHwLOlQhGEZVUJIIGVIzknjzjsbk01ZAjErI41l6JgxTIptaaN3ApoRJZV0XuXrAUNc+6WHHw\n+VdhrqZyERHLBVAM1Dmc5STKNslgz7Arw/B/Bc7abtk7gcvM7CjgsuY5wNnAUc2/c4G/3z27uf9j\nGElSbtDVBBgbUKGNEiQ0TbLuq//B8tU3UaYOB25YhVp2AtQM+r9AFZ48IgnN6ZlZw3OwXGZnyP0g\nRAhygWBHTn+MTumZoRYRV1InY0iElVse5v46svmIo7jjb/4McSVd6TSFhXOt0cGN7KWdWdtT7HQY\nbmZXi8jh2y1+FXBK8/g84ErgHc3yT1tOK/iWiCwUkYPM7MHdtcP7L4Y3yYV1B1kZWlNqAIxEB+/6\nECsmXv3zyH/9XxZqpO+6YFDM+N0r4/47u7Jtv/AZbXEfsSL/0+3a5s4GsoDn6xV9zPndlq9zxwkx\ndBDr0zVFxVGWgeeefzHSKWHDuuaABBE/c4Mg7fz0nuTx3lXLZwjgGmB58/hg4L4Zr7u/WfYIsRSR\nc8nWJytXrnycu7EfYTmezhCcOEBBA+PBGPv8p9jwl//AlkMP5ugNdzJPu1SpZiIM40yJPlKm/bvX\n9CCj2rAsEoMK4Za2EYmBTqYZVqVKtrhnE0kGsZS5VJuiuLpP6s7DpT73ugUc0e2zbrLmqAuuAfMk\nwC06eFZay08GnrDN3liRP/ZX0cw+aWbHmtmxy5Yte6K7MecRcRgBMcmiaQV9D6PVGMte+1aWFIbb\nPE5KI0wFIwRFJIH3FPu5UMIg+8hNW90ImA5aZjRZK2qQcsFjRy6KPHOIPpvIu5Rw6hBTQPG+oNOb\nxCkcKH1UjWMuugxtpmhMbPaZyU8iHq9YPiQiBwE0f9c2y1cDh8543SHNspadoCjSWEuqJbXkztQ8\ncB/rTj+RjWmKI/vjFDpFSENUMo9uDUV0+P2gernTRx9Cbi0WkUXRNIIlnHOI5nVeczte825aT0yY\nFs3ZRnbwSFOT1PAGPdflpgS3jC5i6F3v5YHnnUgsR+hozhXPVdMFtSfXnPRs4fGK5ZeBNzWP3wR8\nacbyNzZe8ROAze185c5RwKXc88aZp3ZGWHsfZTTSYc9gUqY4pNtFk2dShkjSw5FQ8UQBnSVqYDvJ\nIhlUTVKJRKm2WSc7Mf/U5VJyhXj8QGQ0F5IICJui0XvJ6fhkOIMtSK7CYxB92NqSYXpfmu3u4HOj\nJfKQf8+JkldHkQJIJNT58Uiv4umhy9N6PZaedBrP+sAHCSmRxEMSRAKCIW3E3z5hV0KHPgt8Ezha\nRO4XkbcAHwFOF5HbgZc2zwG+CtwF3AH8I/Bre2Sv9zMSkUlvBM0+7U49Tly+ku+84lSuOfskRi0x\nISVlt8KCMRwFi0Lf+6Ym5exg0OvamqYw2fEi0x7qgSBmZ8ZWkaqsv02YU3TZglx39hl5u5JT/NIg\nflAcAcdEM/q+p1IWOWHkskvw730fOKF38CF4BDOjiDPPkW4jm9Z0quynenpZEI8a24iSNO/bXnQf\njdSYtzt6tViuWakuYubQAu49/EjuPOpIer/4RpZc+DWoejjtYN7jpjaiRT29v21k0L6h7cEzS9DJ\njbjOMLUrcJIlcP1pxyPAcIDJSghimEChRnSz947xmrta5NIQW4ebSRSHgpOmvJxj00RiwZCjWwqT\nMVGknA2vkvC1Y8GV3+SHp72In7r4ElS7PPyyk1j+lUvQskBEqMxTXPU1ZNFCOOY50NtCmjeftV/4\nN1a8+BTu+R+v4eDLv0Mw46a3/QIL7rqV1eY4xJRCXNP9MiISqK3Gi8dQzKDrAmaRCqW0EpXsmHGW\njy9o5NF8pIYy6FUWkgPJweeDsKGe9BmyDpPWZ/Mzns2Sm7/P4kuvZ8s5p7D0ggsxuigRLyU1PQrr\n0KrknmFXe/C0YjkLMJSeCQUJLHLNGSdwXAjcwDCHpZpSKxS306HqbEGomSoGvVo9wzGi4tHs60eT\nQ8Sxpj/FoZ0R+qKsGnU8ZSzxoCWWdxxL//NSfGcelsaQchTE6KF0e1NYZxgZH4OoMNTNvYZMsHtu\nw448jJi6lBJzG1kJ/ODnzuKpv/VuRp//ImxyEhkd5fKTn8WpV13Pqne9nc3fvo7aVyyXQJUcI0VE\n6zzEd24ItRpnRvKRIhYgSvQJ0+Ixh2ZiQnKJ5MCpUCSfY2J9wqNU6ihShbcu7u1vZ/GrXwfiUPG4\n7a51tD5BOnvwqj15aRuWzSHEHF2JBAI3nXEqRzq4+7nPY7i3BazPpKQ5I5RGwqzDUC10otFJue+4\nmQc83dQHp3hX44phUlFz0EVf57Ben6WXfp0lZiy84FpYc3euehzmYVN9Io6uGZQFSQJp3hBx0RJi\np7M1A+bwo3Fagsulkj2C18izPnMBwy84AQvKdW95DeoTp152LVR9Vn7wL3joiCNZXHQY/v0/Zkx6\nWNFh+SXXEMwRLVF5ofIVYh7EiD4ReWyhhFx6zWgCz02IPuZhtBkSE4V3uO4wa0KHRa9+PT21nKoY\nq0dsqxXKfU9rWc4GzHKr05jAOb5x6nGwZISf2NQnd9eBnbtAZhMRw+NJ+ODYUBnzHGx2wyxz46yS\nIdaMb+b0K7+Hxc2IX4qFiFhBjaLO4TfeDYsOIaRA9JKH75at1WQgzuEa90xiMNQ1oqMpLGGICo5I\n5QvKxoOcxHAqSK8HTnjgqitYfsbL8VYRXcmaM57HJoXaeQ79pV/G/vFTqOTq8kN4JqSiGwN9l+di\npwud7BBBTKhCxVAf+kXAN2FCG7Vgoa9ynVLnWHrhtQyqlzopcj3nYHPqqs9VWstyLqCDmEHBrIdo\n5KEzjufwjvDsiV4WCGZv2t6jYmDSByup64oRpwy95W0
cedHVUEWecf4FnHbljSAV2l0GEpGHV2PA\nvWefQocpwqIVhDowKPMuJnl42kz6BXLmiwEeJbpcDDkYIBETzS2BnVCmrQ4e18QT2dAosTvMgWe+\nDL/uQcwVBDPGY81T//wTHJWE9E+fokyJNc4z+fSTqaYqRsoulUskJzMqQu34NIiBiiEaSE4p+n2S\nGMv++1IWuoQpPPyTz2LZly9p6jBLc6ygrVDOOlrLcpYQb7yW63/nNziMSEweT413JcklxMKsy0B5\ndIxbRkdYvnkTR194NeY73HfGCzno8m8y9cm/YfTc3ySaJ0geyopGChdQy818k0guKjejR1BqhtS5\nUGPZ1G8ceKazG6nfLPFAMI8KYIqIy44SA/CoOJzlsKu8u0bEKBlk/jhCfzOpnA9W46fGsaGFyKZ7\n+PZ73svS793L0LweHRWSCcGEuMNAccEN8vuD4mtPacpkKIhmrPzi+dCrsKUHsOpf/p6Df+nXKMxR\nuUTZltXbq7SW5WzEYGYoYkWFmqIbVhOOeR7H//fFrEkdnE8E18FbblgFkbgPKwi57X5QvfomNCj3\niElNqE80Yz2BZ29ex0YnpO58zAvlr/4mwTzz3/o7zbyfok0tusI5ooATIUkCiSSJufc5jfc424ig\nfvp5NjCzqEQ8nSb6MjRN3JSEIzf/8hRk+9NlT7c0jTU03wClNdvPlUCpy5Hs0LEAvmDjX3yMyde8\niXDvfTzgx9HoWPSlL5CKREVN1B4QoZh5OxlKwGEUaoiLbPKBAy+6mpGiQDTBggUofVb80q8QpIkL\npWaKyT13MVseN61luTdoirMKCZGC7K5QggVEHFRjrHv52dxx4HyOuG8tKhAkC0L0MQcv7yP63tFJ\nmq0kgVocnQQxClIodT/RKR1J4K5kLLbIitJzB12eedGlOD+Exoh3eUhtosgD9xIPWonHg0CiJlBQ\nUVNSkGchPYNu6DTiuC2Wvc2ieMuhOdnibARrZm1QeMT6Qexnfklerk0HTUmGOZdrmuQlmMuvu+Gk\nk1n6q79A75/PY56v8OryPGlqMoisIFiiT48Yugy5mpiMoJ57amFLIZQOTrz0W2AVNQVCjyBbG4CY\nJsS11uXeYlctyzYVYC+QBBwOzJGoKa0AMYzED95+LktOfBEPjHqO/e13s+H33k5obmiBfSqUSRzD\nNSCKmOK0xHwkOUOkYILEqMDUOecQzv9vxixwZFEy+oWLefq8IaauuZqhk07HhdBYgpZ7GK44nDBj\nPi5YFtIslMD0MNTnwhmAkrK4TiPNK7JQDizJXIDEbR0zDSprsDVoHvKPl9/mReAGt4MHNcV7R5L8\nI+fMoyI842tXsOWcFzCpgMsBXW6qQjodzIOkHn3niFYyr1ZSAHHCgk/8M0+r+owcdAi6YDnUkRgC\nQRyShsFDao6xFcrZSTsM3wvkNlNQyaCDX01FwlU9nvnn/8CKV/8ih22aZMPv/q/pG3pQwNXYN5P8\nlc/OkC0dZax0VD5Q+wiSiBJIw8owHeRt5xLOv5ilF3+Tl15xNUsvvRYbHaGoA67oZtuwyeYx0eZ4\nhGrmiOYxgq1lWhR3ICAz3uam17tHfqu32/y2QrnjOAMvDpOahCEpkEQQAx+FBRdczSEfeh+9yjF6\n6XdYY1DESKinqMwwZwyZETtQp8SGyiEKIwccjM1fmrOGioIgeSZ2sOs7PMaWWUMrlnuRkHKbUyxQ\nqieVI5hF1v/d+xmKfpsK19PFI/ZRDfAygbea4VpZ0BOSGJUItQOXKnzq8+BRRyPn/RvmIvG7V2OW\n5w3VFfQChBNOpAB6MypbDH5evK9IAAAgAElEQVQMyn2YjeJ28WtvBEqU5CMhKnVMSPB4HWL4hHM4\n7IrrKB68Dee7TBUlt48uwXyHoj9FbQnr93ECS3wkrDyceuEyEMFrO6Cbi7RiuRfxzjA8cvX5GMrl\nP3cOhILeRV+gV+g+ksVHR3F0kqPyimrOzCn7iqFsfuUbGV28lPlfuZgDv/jfdI59NhIEk4perKlS\npDQjAR0aq1KUNIe6TToE0WKr1VpA7YyU+z6gcQq3+EhWlp7Segxt6KHOiMUw3nVY9tUruXsi8WDq\nUneGKDQhyYMpc+YktEzTOnj2GgbJUFWcD1gaY93LTmaqVELlKXoKRfGIZlb7gpnVxQtVNnWUBePC\n5hFlIyPE4UU8dd0qFn/lckiKzV9M8o5gRpQ8N+hRxuqK0aIEcpZSdqRIU413Hx7grjDI1myqrfuo\nWHCYKk4Mak8qKqT2xMJRJNh09VdY++XzGfnuzRx85YVsPuMlLLjoGiQm6HTpE+nE0NSN29cH2DKg\nDR3a52xfEkyILjtp7/nIOyD2MGDZ+/4W/6JTsaIgbVcwbGclz3YXzqzJLBk8z6l63iA5oZMU6zj6\nvQ6HqOewLWtYcNX3YGgeccFSEIfDMMkqkOPIHfOKzvTxWFNQowl3nP00Oy4JfAJCDuFyzmGixCKh\n4kgB4k3fwXSS4sVnc+Tv/282OEe0Dj9acSi4Cut0SZbopICFhEn9WJ/cMktpLcvdziBmZUerlIRR\nf+/b3PSxD7Bi7YOU2gHzqGtKcDVhLJUThqIjiSESsT3yu6YklwgpxwOmptmVihLUUBFMpxDJ2TPL\nL/g+N59zIkddeg2uTvhiOxPJIEoOscieXTfty5YmvGdWmM474RG7OF2dXaZjZaNLBDy19ailQ8fA\nO2HDyccy/8rvkO66geLwY/KPiHP0ga4U1PQIdBon01z41dj/aS3LfcLAOhtE8W1rGZpzJEsUE5/i\np/7Pv7Pw3TfjSGwp1xCBWhJRsuVRarbFzE9gOkxXJ3b73pooQXOIdxSHtzxv6pPHRECE220+93YW\nE7XgB6e/gCMuuZRiagJfBLYNZARkayyab2IArIldHJyV2S6UsINdbM7F9Eqfa14iUEiXYRF8UzJv\n8VXXEUToHPls3KaN4DuIFHRIOUzdugiCx7fTlnOMVix3KwNLa2CbuO21hHTHL5P004xdcCjj1yxk\nwftuZsnLFfFGN3Wb+EOHs5pAHyWH7PTcyO7bSxu0YpAmSyZbsKkJhK8KJTkFCxwTag54w89z5/xF\npOc8k46V6NDojK09tvo5QrYu51p++y5guUJw82QHL1iyfPqhSBfBbRMltb+dj/2dNoZht5Pn5qat\nylzaG0vG+nv/iPmr/g/lCefx4C1f5oATvgTVQcTzD8ZsjAk3SaHDeFFIgegDfR8Z7UUeo0XNj7+H\n03esz90hneHNqJ3gDTrJqEUwg9UTyspPn8dJF1ySh+kq9Okx5Lqk7eIVH439VRTE7brymbW1e+c6\nrVjudgaWZdPOFgBFvGPkrj8jnbaK6sqncEi/S68biTbOgl+6n3TAg2z48E9iJDYxwWi5AKc1I31P\nv0xE8QzF3Ttwy33Ocqri1js5ISbUoWRtOcRzLv4GGgSpNhKKUepaKYe65CCodmCyq7RCOfdpxXKP\nMBiGN3N1Bv
WG8xl66SR20YGYh6nhMUIffNmjv3QDXVtMEk+n9swrhnAa8Si1LwlaUdjuF6YyeXoh\nISguKWpGVx2bvWNhvyYwgXnFm4ewiNRTQteBpdy7u6XlSURrGuwR8mlNKTWZODXF4rMx24Cc8U2i\nGENWUNKhxijMkdxmlv/BTfjXrcVXZa60I54N4S5Eu812B5kwvnn8xCoRTRQ1TqFQSF7piOP+E46j\nLAK3Lp7HQV+8DKljbtvqEq6bcvylyC437mpp2V9oxXKPYFiq8D5ABVIXqCrx8oPYcvUzKaphJlMe\nojsiScosQDEwcswDLPn9DZgFTDosqQ9D3aC3oKC5WgM7jMH5Ma3PUhVxSs9HrM4e8EVXf4N1vR7H\n/dPneOA//o0N//6viClaRRCPOUV0Oy94S8uTgFYsdyeWwHKhL7xg1KSOQAH923+FTWk+ALGcohwU\nzEiKl4SkiPdVjrMcWsXo6zYgCeqOIz3rbrBICh1AUFdjboqcYjIoWwtNue1dooyBJB1IJUOTHbqF\nMRkSUy88lXE1WHwAK375d6i6QzgJuDLQ9F3EZkmf8paWvUkrlrsVwaSJsTRBKXD0IT3E2tU/Yr6f\nYCjWzSsh+iaA3ST3n9GEJUffRuguE/o6TjrofsbXJrxfDDaGhYhZhVIiGCYVj8ffXIWmLUPo47og\n0Zj3vz7AkR/5Uxad8MLc8kKNFa/9ueYdgznK0FR4bL86LU8u2m/87kRyzcqgNYNZPaVLddVhHPSM\nN7JBfnLHb5OUe8ZYgfiEsIVq8Y84+F23s/L1C7E4QfW0mxEr8X3wNg8Aw01n/vy4ZNsw0deCSYUx\n75n8wPvQepIj/vRvcmdFJ+CKnWyppeXJQSuWu4nBwNREMSdNsl+EzZcSj/473A/ezlK9lR3N9RkO\nsUQSh1gXXG6zOnXrT1AVt3LkmxMT9w6z2W/C3BBSA1YwRsLp4xOzgS3qnCHemFxxNMu/cTVCopxD\nlYFaWvYWrVjuJqbFxxQs16YUgzS5mnD7byDalPF9FCdMwuFcL7eeIKFAccwPkeRJznHYr6/mqHes\nozAYevVqqoPvxpsibvJxCZsAvRBJBYxF5ann/StiBTI0j3qHDbhaWp7ctGK5u9AmkEcKsq2Y2HTp\ncjrLX0nVfT7mocK21j7bDnMyXXTCBJx1wed0QSc13jt6cZyH5WE6z36Q+asP4qB33Ujd7+xwxlK3\nrQSxzTqnEQM6VWLDmLBkWCi2bMZpIgGFFTkv/YmflZaW/YZWLHcXLkf4a5N3nSyw8PSHMIRy7HqK\nl9xKeIw4brECCJiA106u/yiKYnlukkjh4Mh3r0bpMPyuW7njL5dQFHmjSWpAiVZhJLZxWNvWJl2V\n86gL2YJ1HZb9+m+z9Etfg3mLMdfBUTTbArG2lFhLy4BWLHcnpmA1iToXJ9NE/+qV4KG68icQffTQ\nHmkELrdqrVA/lecjpUKkyhXWi5p+mgTfR1zkab+9BEJEykiIAfWam6GhmNRoY8WaJPqhRoDQxHcK\nHRwB/3//Dqoe1A40kiThrGh6d7fOnZaWAa1Y7lYcTgLePGY9Nl18HN2TbgTrY6ZY2nkcpGt6YUtT\nOUMQsICnItYdQghg2liw9+NOW8uSd95D+bqNSO2YkikwwakgKk0zWYdPHUymSK5iKngmvTDvsiso\nJ6ZY9fKzsBL6XghmYDELf0tLyzStWO5OJPfTTprbpvZHanqXPx0TwVJCfoyzbdsFmFtTBREcph5s\niNpPMvr8B1l/yTzG79qEoSx/112YCbUYnprCKsxKgjq8duhqzWgap7ZIJyrzLruG5ZdfQ00Od6oF\nhJAnPdtJy5aWaVqx3M142YLziZC2ENIqNoZ5iBlOHPwYxScGFiaAQ6crpYuvAQEX8U2xjnmn3s+K\nM8fplxMU6SA2jtxBwPKw3nm89BCg7kciwkRYwEJzqB/CxYqgAcVRWKDQQHSRfddXsqVldtKK5e7E\nQPwCnBXUYRFdapbqeqCDp8vjNdUUD1KDVKjmCuWiBRFHUkgEoiQO/d8PgUyBjuJNmBrqYdFDUKLU\nuO4QEzqfkXojE1j2urtRfLOHJom+i7mboVlbVmw30/74zG1asdzNJJSxKxagjKH94eyYESP5SR7P\n7WKSoOnBY1YiJnlOkj6gFBQEE5JXLE3S6zzIT/7uFub9wc2E3giT3U2kqNQAvUmGOz02xfncFIZw\nBkki0dW5TJsJRdM3R8P+r5S7W7zSTvpZPVZR9cdi+9fv6vt3tj8tPx6tWO5OEkTpM+/UTUxc8GI6\nYRzDMGqw8Liajol5zPKA28kUXiZx0s/PzaFNR0ghUHfqLHZ1rnx+wHsmoJdY9Ac3M0wH6wRSqjni\n/P/krC9/iSix+QYItSSSpBx2tHvPyhPGtvmrWNPPZkBtven1RhYJ2+69M7e1fQzpjmJKB9uZuW7m\na2YK0faPdiZRajqde79NhNd2rxsch8z8PN3xZ6Smf9LMme6ZEylphsNucJ7gkce+o8cGW5u2Abkb\nfBO5saMD3E+ZbffF3CZAaV16q/6GxWf+EPBN3neuFPR4ZwGzR9yDNbWabdDsKjcbAwVVLAkVSgqT\nJAe13cmKP9oCYtRDDg19NvoDWP3ynwY/n4DgCTgcnkCgwDUB6bkYyBOrl7m7GFhkCmCKmE3fvLX1\nKKSLpuaMqOXsqRnvV3L86yAG1swQNbQRGLOmBbHm1xhZ0EwEp4aZTgvc9BUU2U48FW8yvX1mfKax\nVcwS4MVhTe69zHi/WhbQZNqIo01vS0RQNXCS94UZwqZZUgf1+QefJeKmxVAQzGybY1fLaRA2Yz8H\nx640+ROWXzuYk1GmGkfjk29We6diKSKHisgVInKziNwkIm9vli8WkUtE5Pbm76JmuYjIX4nIHSJy\no4g8d08fxKxClLHb38OmW3+fydQlSv4F9hp44gM/Q60AthUDyDeTt0BIBU7L6eXxnmWkAAtfcT+L\n//qPGX75Kdy7bKSJUx986XO3bzNFhMbnLiCzo5D+4Ib3WP7BsACav7qFdVHrY16IgLp8HAPLUMiZ\nVW7QxtYMN0PoRC0fayOEKoJo7nYpprkVsQlRcntiaSw7r1mwEzCWtwTNfooIyZQNqcnJmmGVeTUq\nsiBNW8C61YZTJzhx032SslWpODXEybQo2wzBS02lK6fWHHPez9vX5xFIn9wLXhtbMIkxJQ4kC680\n3SvVDDFrIi/yucK21pvKbsZAot/8cmnT3fjJEWa2K5ZlBH7XzI4BTgB+XUSOAd4JXGZmRwGXNc8B\nzgaOav6dC/z9bt/rWczGiw9g6UsfYvTBjzPix3Ck6SEzJjh7IgK0VSL1MS5d8lu/vMVhd0OEzqEO\nuekDHP6WX+OFn7uMRCTOsE8hWyKzkUELuNwlMmcVqVYYFdHGiKnDlioLkWjuUvnd+3Lr4GRGMMWa\nhmw3P9zPAtNYdSowJsJvffLOaWE56cMbCWY4E664eQvRCb4RxiTZ+ht
zgtJYisC4CLUTvnvfBK/7\n6G1sMuG/LlsDwD2ba1aNJU78wDoeinkyZro9sBqbnPAP5z/AZT/cnI9VjaDGyR9cjwGTzXX5+Bfv\nwzUdOP/z2nX55Ihg4rhpTR8VY7ypSRCd8NRFJWbGf31tLVNNc7UpcThy/K1ToyeOUz46lcUYo5Lc\ntK4GNovQc5IzwAwcCUPw1mkWeQZ97p8M7PQozexBM/tu83gMuAU4GHgVcF7zsvOAVzePXwV82jLf\nAhaKyEG7fc9nHQlQFp+xPhfIlQQm+EHaohgIqOy9oW3ynjpGvDOminvhzD9lfPxziNw1Pe80Fxj8\nvAiQpAmbCh1IHnOjFNLn45+/g5d8ZD09J5z67ls4+tARLv3BJiZF+NPP38ekCKd9YC2rHpyiJ8JJ\n77qNk/74AU750x4v++B6/vJtR7LFCWf+8YN89T2L+Z1P3EnthBc8fSEnvfMW3vrxO7ht9STjzdB2\nRI2eCJ+9fA0d4LqbNxGBZxw6wmfecTRfvGwNP3/mCmoRvBcOnF/wtXcv4Wc+uJqT37uKV334R1Qi\n9J1w35opPn/TEuaPFrz0IxN8+P/dy5gTrnrPUv7hggf4wZ1jmMB37pzilI9O8tufvJNPXrieCXFM\nARPAYQcNcfKHNlMAk07oASe9/0EmRXjNSQfwt/+xCjHh9R+4lRf94So+d9EDJIGXvus2LnnHEH/x\nxfuoxXHGR8aZckKhxsc+fReFwbiDH6ze0nxntBkgxeaaOJ4sLUbEbNcPVEQOB64GngGsMrOFzXIB\nNprZQhE5H/iImV3TrLsMeIeZXbfdts4lW56sXLnyeffee+8TP5p9jNlmRIZYe9Xvs7D+a4SS2nk8\nPQxPkUrUVXtzj4CESyXOefqdki3jC1h61j2Ygd/aU21OEAFHjTUTBZU2ndrNOOX991MMLQIzTj98\nPZfds5jF7kHu3+z46WeXXHDXUmI1yQJZjxr0ihVc8K75KNmKeu2HpugHiJvv4XmHFnx37SgX/dGB\nrB+r+eW/qklEjjvoYb790LI8NLVAzyUKdRy/Yg3XPriM5DxFMib7G3DlCB9/U5dli0p+8a8mmZp4\nmDedOMTYROTC2xZz4qEP87X7l5B8gZgRt6ziqx9+Gj/9wUnMxTxl7MCpYBoIThgfv5uv/snTeO0H\np1BXYc7jUiKGknpiLRe8/zDOeu/ddEaW8MV3zufl719LGTrMZw1bOBCAw4bWcu/kcj76C57f/Wwe\nfhfRpj9THVTj6+kMLcJLh0/8qufAhSVdG0fcMFMIw9o0gZdcvHoOfYV2iIhcb2bH7vR1uyqWIjIK\nXAV8yMy+ICKbBmLZrN9oZot2VSxncuyxx9p11z3q6jmDAWOXHoBJzVCawDkQtBnGKWKdvWpZAlRo\ndoDYBGUJ6aSaav1nGVn8C82kvea7crZ/4w2QPoaidHEYEYcZaIKz/3wMadJJaxFWhDVsqA9FcEym\nzZS+i+CpJlehfpROdz6Yx+NRM/o6zrBfQOUTRXQgyuffPczPfGAC8YahJO95xZHr+MrtCzEpKJMj\nuUS/GqN085EAFg0JhtOQ239YdjjVBs7loe/5fzCfV32ox4rhVayePDQ7V3ycnjemceilIJTN16UK\n4LVGVJrpWoe6wcBQeOsJW/jkN0cJljAMSTmv3wsokV49TqccRTSgPpFUCSJ88PXCH/17QZI+0nsQ\nLVeCq/O8a+jwzpes488umccvndDnf568bFrEddARAPBz4fvzGOxWsRSRAjgfuMjM/qJZdhtwipk9\n2AyzrzSzo0XkE83jz27/ukfb/v4klrVGxm94C2nTwyyIl+El0fcF3mpCKlC3d4e/znsqjcSiw8gL\nxxF3C6ZPQ/KMP+IF1Xwjz3ZMczF6rAcEkjh8VfOi997JS54/j2/fdgCVq3Epx6JqkZ0hXj3J1TQ+\n3iwmgGhA1JH9G0JyFU6zua1Os6MkedQngpYYkWSCk7ylKIlgQBNLkEzBKUkS3gKmoD7iU5EdTI2i\n5Or2We6rWNOREZKvssc6R+ZimuMUohnqt1Z/cilHL5hLOHMkIjSxvCB48ySXSESc5rRVDXWzD7k6\nlRAQ9YhA7fp4ClQiZk1NApcwATXBm+DM85l3DnGAmwIL1M4oCLkyFnO/J/quiuWueMMF+BRwy0Ao\nG74MvKl5/CbgSzOWv7Hxip8AbH4sodyfUKA0xW/4MvPS/2fvveMtq8r7//ez1trnnNumMnSk21DB\niFhAaQKKAmooCigmxGBi9Bv1m4hiw0IR288UYqLkKxKVKhLFCIKiMWIEAdEYqdIZyrTbztl7ref5\n/bH2OXNnQBiGy517mfPhNdxT9tl77fbsZz3l87kMVCgDNLWdEzyujTCztGdRKkwd7Qfn4exW0mUv\nJT3wNaIlxOerfC4YSsiGUoEkzdx9RFa5DAu244g9N6HynWzIXARvYJlGpHQdLHpc93KXbHDEGYRE\n8iXqMju8OdfNZwCe5COKUro26gR8wpyhLlFogbiAukRyCfMJ07qyQEG8Zok3H6lcibpI5TokFIt5\nX1puELzhKXA4XCwyebRToou9DlmXsvYRXjFvIIY6BQ15f2uLlci1soIgYkjICcbkK6KLWJ0AMlfl\n3wOSHGIOEcnTcfU49VCvB4NvXRUZpUlbcq481jHvuW4onwjWJTW7J/AW4EYRub7+7IPAacB5InI8\ncAdwZP3dpcDBwC3ABPAn0zriWQwP4ALJhlg5vC/zR7+NIVQu4DX328w0XHQUvqL1xjtJ5ggH3k7J\nIho5vTynpk+xTip4HLmQKJFCwRXvhwNO7+BMMBxectWLr4+3iwFC6pW+YKC5Gz5/Ty5/cbj8Q3G9\nGsLs6WXaO6BXY+jx9QmX2ghnohPz2eg6L1B37xsRT4ERyVJvAUIen7latK5eu4R8UvJYPYmq3pZl\ngpN6VOCxlKu7PB4QklR4KRA8gqASswGu9zPVD2pzeWx5Pwqsrp4wy/tqkkMHwQoiFS/ZchmvfvG2\ntDDaBIKVmLQopY2nxbozHsxtPK6xrGOPf+iW2v9RljfgnU9yXHMS41cUtFyiYCHF2I8QUYqqRdko\nUTGK5JjpPoBERdJB2pdsy/zXXof5JdnMmMy5izxYyF1HvaSC8ZObJ3jFM4fqdzXJseVpNYBS4UJB\n13B14dZ4cE1JUtQxPlffGr633B8+b1OPpKy1nfzLUH8XHuVGmrpemfLp2tvnEcut5mVxay0ra6yj\ni0dfV/cXrn4ITflMhIIGp71tC179of/l0k89h3kK5lqUTBIYwM+xB+6TwRyZgM0NDO1fMbn4vZBg\nUFYQC8kZQxOKlAkwZhqeAqxiwSHngt+U3gRzrl3gdWg91AYnap4G7rPTILcui3Q67TUXr2Pxbj28\n+bWNzMaG7oOm2/FzwGnKicftgEO4dTwX6zdsoCar3jjKhqBvLKcV916xN4MPfY5WYxlJPUUF6pRW\nLFDXazqZMSRVyuRwIminm1
hyODOM+AjOzFkPyTG5ROp5M684LbHT4oAbv2vNRTemYNpTBBHp/dvn\nmU2O+/z/ssO8CnOGSL6e0gxXd2xI9I3lNGLL/S9nslpEse+DOQ7khZB81rQxx0zapmSO5AZZZVvQ\n2eFkXOt5wCjSi72FudWmVts+X/+XG5cLzntf4KFSKBbtnBeb4hX1sf7oHT+dAODA09p0OhMILmfM\n1YNkTlVj4zCYfWM5nah+y9CBP8dGr8almvhgSh/3TPo6XpQGbTZxd9C67UMsvWw3sBGMXCqjVMy9\nqGVOlkRindSABQ3hj88o87EV6RvJaULPM3eDAOyy5F7+vxNfRKwfsMlVoN1Y7Ny7jtYHG3dwZrpR\n7MrK/1jEgPeYNPCSMKkw8TVBQbcXe+bMZgLui1uy3UG3kCkfWnUlX4HWvTBzBgYiAU83CWNc+asx\nZPntlK7AWktotkaA/jR8umAYceIBPvunO1KgKAFEsxpp3Y2W61a7FQNPX//r6btnGwS/Q02wkZ1q\nrzKiLv8Vcm3czBnKboIDtt//N2ANYBBq1qKalmLuYK2sq8NI5jh4txGuOv0FNBfu2DOU0J+GTxcE\noTEwn2AVy5OrJZZz4jJn90NtKLsEcU9fPL33boYwee2hAIxdcSBDzUl8+ybEJUw90tPsnuGLqeZW\nLP0iqqsWgcJdlz0LdLw2k+tDRbyB0cu+5npBJ4YmYYX4uRR9nXMwafEXZ9/HkZ9e1qODMxzW49SC\nJ08/OPsx5+6X2YiBF10CgB95JlUs6MQC56glIazXey0zeT3V1/DD1WJKW4jYJFsfcBP4AVbzac8p\n3xLohjACUJBLq43mWge2m8HtY5qgyq33DbLLdqM4Vbo0wYaSH1NdEr2nN/rGchoxsMflDB14DQN7\nnYtZqntnuz3BG+Zi2jLcwYO6FUt/9qa6f7gOBZjMPWdAun+EVPdpK/Cav7meF222UXTUbhg4R0yO\nL7z5Gex98oNMiq/L+B3JMgPU3LuYnjj6CZ5pxMM/2pPmVvvQvOt0pBdjq5M65kASNUX5jKDbOLeV\n/w3trQ6FHlVDVdNrtWZsLE8adXtmlEgg1J2GHiMSFj2LX9zv8Gjfo3wKoCZc+aGFmCWu+thmYLln\nXOvW0qd7YqeLp/8eziAam72U4Z3/hna1Obgp827JSZ6ZfPia9OwLxX6/ZXj7k4GUSRqsAZYN5ZxK\nhEhueTRqQYOUC1mueH8TVx/rObU/cwGWY8MHfPI+zrjkfnI8yWfGp95CbmNwLPvGcrowftunGLz7\nC4x/f0eG9r8QV2XCBqHrVc7seLp22kiM3/sfCB2MQFZGNKg7L+aaJ9Z9CDg85sF7xyrnMVu9H32D\nOY0QSCa857Ah/uqwLTOTEYrH6hyi9ZZ7uqNvLKcJQzuchD/ghtwh8+NXglgdFhSQVKdUZv5wj6Ul\n2G8/RMoNjnQERAUhkIhoXeY9V5CIud0RRU2YMDjk1HbPs+xj+uHF+My/5zYGyPo9mfyuq0K0caBv\nLKcR47ecQ7Xk0CyB6hWvLssC4BGyFo+b4Xt6xC9jNOxZy7QaTVXMKaZWy+AKYa6Ero06Xpm5Fr0I\nzVrBsaNzqrx+TiFZxSUnLWK+KBNk8bYur1FOe2wchVt9YzmNGNrpNBi7AbNYE64mos+KgIIi5tAZ\nn64og/FXmHgcHkFRyUSvOtcCTVMS+IrLpStmVDHRcgn6JUPTDtU2XgredsqtgHDcqTfXqo5Ws7Rv\nHMkd2Fj2coYwfnkDmlsxXjw309MmRahopEw84GYwEw45dje51Ydg67dhjCGWSK6ZvxOrJ1RzB5FY\n36CZUMMQzINbdVOeENrq3vB+3HJ6YDVp5mhsssKE8z6wQ/cbutHjjQUbz54+xRi9fJChA0qG5u3M\nQPpNrU/fQrR7U1eo6IyWDokI8favUd75PYyER/FW4SwhTGXenuWotbEE6WXDu1AVGoufnVUep8Qt\n+x7m9MBL5gNt06QQ4X1nLc3XMfTITDYW9I3lNGHkgAkgMnrfTxGBjmtR+TwdV9/GpSJ7ljPI05bU\nM1TcC2kVTkaAXPJRq36BzhHvq67p9+ZJNY9ibh71ODM6EytoayYr6eOpgOGCZ4jIrQ9Qd+1Yz8vf\nWNA3ltOI+y/bmyY3MuH3obA2wSJYA4nDGL73RJ4JGBHvKjppiJHGTaTJa8jEvzWjpcmcVJvqKjNC\nREmoE162w2StMljLLczB/ZrdEPbascm1d1d89oTNseSmCHFsPA+ovrGcRmx+4E8ZW/Q2Ws87HK9N\nQmyBeSyMYa6cQQr+7hTb0S52o1PsgRv4o0xCLFketlewOIcQZTWPpRBwKB2Eq/9nFJm4f414ZT9m\nOb147+GDdGyAnRYI6oROvzsAACAASURBVB2Y4HHMMe6qJ4W+sZxmNB78Fsuv/heSnyT6Ns6NgRW5\nINzVMq1POVZfwAvtRww//wzEHsTZGGjNkO4A0xk04E8CU/V3TOoOHiWZY0CU7356V5pDm6zxE+kT\nAU8r3vjpMV62ZcWKJDhKhKyTYnMsSfhk0DeW04xyi7eyaeM39c3qcoywFrwXsyx8P4MQhPKXe6My\njNjCXNIkOausInNDWuIRzkumaPPiqVR442mTfOsDg4+Yfven408eRtY9WvXgLRx08u+YHyKGoqKo\n03w9bSSxy76xnFZMUkiDUiKmVutJZ9JfpIOJgFQzNpq2LQaEoB6f5rPi2r+qM8sNnGXdbZkjl0Cq\nPZgoEaGoj22FBy7/2wavOaXseZJ9r3I6YXg8//2ZXXn3sdtR4hFt4Mzh1CHm8HOlquJJYm7cKbMe\n3RtzgIXPfQetl19FaDVQUcQC1gEQnLqaqXxm0JKH6bgCE8f4lc+gWLwQbEWmarN8E8ykxMWTgTef\nO3gs9EIHRk7sC9Ca0utuZn2vctqQj+PBn7yd/XYexptQmgCaVQA2ouPcN5bTgvqCsV9S3fddZGB3\ndLIiKEgSvB9GzNWkGjN7cbWi0tj7F7RlHkPbfgJz8xFTzM09DsIosSeHa0QE4cAP/Iq9T/wVHQtc\n/IHBnqHMSZ6NJ572VOGoF6xAgbLYilIhCgQHmfdJSCi6kUzDZTZMV3bffXe75pprNvQwpg1j1xzB\nwPJvgQ1gAp6KMnRwOv1eZZe1eup02gCnHnMJLQUpDF3yZzSf92YoK2jtRxSyxJQZbo54B5Eq11bW\n+2oVWFGRkmOfj95FY2QxkrrHQRHpFk3XVe3dY2S5NTL/DTxuWYAlkLULsLsdLDble8mDkmKtZdZe\nT4I5VNCdgMtOHOS+VRXbzC/qKLfhibX+DvXDa25Ox0XkWjPb/fGW63uW0wwDhl70Z0TxJCnxWlIW\nHUTDU+LImeTpUHQxK/F1C+GdYgjeBdrVlril/0R0eyKN/RCxOos5dwwlQKhDGGZQpYQV4NoFJtAc\nXNIzlCo1K1EteWBW9y/bFA+o6yRYzAaul2O3+n0X3YoBpXcCLR+7vNxaQsfip/x+ygm
3arXBtVpO\nNg2QzHp6SbMN3RIswWFm/ODqhzHLiqB5xKsf/nPVUD4R9I3ldMJ+m//KQYRXnk+bTSmdIQqhaj4l\nJBrePEVyBA14Leo2xgp1JUiJSYeR4g5Uhhm79e8ZveEoKA2dg4StObljCJMEcYhlw+gd/PVrJjhu\nn0nMKpwZCYeYoeSMv9bepZr1XqekJAIqPk/ZLeF63qCCVfVkU0gGSP494kim+RhC3RGltZF04DxY\npuVTUdRirkfsGlFpo+pAShqSSNrEzGN0CBboXSgb+Pxo/SDVaLzuw7/ibQdtimhZyzpncr85UEsx\nbegby+mEPAfRDtWyc4k/PYxmuIdGbFAFKJsTeVo8jV083cmlV6NdVKg4imqYygvOBG/5gm+7wJnp\nDfi7zqS14jvQ+B+aqhv8Znyi8AhijiSBynUQ2tCEl546yhe+3+R5zxhBxJPwiBitOEo1Nkpql+TJ\npAMShgPnkFAgmr1DxVO2O6hpNn6WeuvJLE25O9SRSJbJb8HQVJFMet6iWgRN4ASx+hiLZ7uRMQAu\n+cAg7XFjUFeBb1O5CufKfCqkwfJUIT7rcW+o3FvXo3S19z0sD8HQtoDDfAOVnBY0C/UR3TjQN5bT\nCLOS9hVDNBa+jo42MCsom+O02kN0s+HTyTyUiaod0TmGOwUpjCMyTpEciRZtGcJLpFnBW0eu5I5n\nf4JirzEi8+rC9DpuN0eQBeAMb4IRqSR7j6/abpRPHR75uwsfJiEc9Kw2L9hkgk4xDz88wrxGxQ8+\nMJJlW8VnwwdUY6vAF2jZAYwwOMiQjXHpB4ZR5/EondFVaNlmMK0CckG8d4KKZ0TGwHsu/uAQOEdV\nRgyHOT/lOZR55W56wNMeH+d1p4xx5Sc3YyIM4ixgVrDrVg/SRIm6imGfsBmsmHg0dNUxuxUFbb+Y\nC94/HweIxVp9R/Llw1yKvj45PP0DDTMIoUHrgMiKa99ISyoqmUdAiCFPv6TrVdr0XF4+eZJTvAEY\nhebb2WtitJFo6SSpMcJkaHHQ7bvwmts/wqXzTubcO3bhl+86BzNyEmStPMRsRRQIJhiOpg4Cjkk3\nxkhcyq477c6/vDvREU+LYQKR8/97ksP3GOKS64wDThnlhx8c5k++eD9nvXtzblupnHAmbBJWct84\nXPnRhQC86lRoEvnhiYPsd8oYr3xu4NRjFmAIK3B86eKHuemuDme9a3PO/qny1j0HyN4q/OBji3nd\nyffyvY9uwX4fX85VH5lPNwm036cSV5w0DMCPbm5z5QdafPPnYzywrOLn/6OccFDJZy5usf3Ivdyd\nngGWPeANWQLVrSzYdYu7mSc7gSlCQFQwl7LnLBuPv9XPhk83bCXoOBM/3goVj1giWAuhRKbJSE5F\nEkAiQYWxphIUqAYYTpOUbggnyu8GtmKn9gMcfs/z+cJbLuXhyZW8eGBzBI+ZmxOlcrXpqJM2Ocwg\nJDAlOqFtgW/+aAXH7zu/5unsyrXBSgLzUM64ZDnvO3QRt69MbDM/0OxF3IxxPGOVcv6VK/nLg+bX\nREdWkwxDB6GZ/Ub+8bKVXPTLgh+cOFSPSHAYd08KQwOOBdAbw3Ic1900yb7PbALChdeMcfjuAxiO\nish7/3kpn/vzrXnViXdx1ambMqotXn/6ODLRxgazqNyGNJjOOd756pUc+oLN8vGQnMn3KlSuomDD\nesHTgXXNhveN5TQikmVa88z2CtJVr0Yta/B0sb4G87GcP5WEqFKYw3QQK8ZwqcFpo89mmevwn2PP\nIamSvENSk0oe4IZjrpxTQZiqpmzIddAJ0YC5CrMCZxXmCqgNaa18VEcVu/w4OSNudcqGukqQ2iCe\netEyTnrjImDNY51fu5rtyHprtZ6cgtRaNFM/c/UYsjGewDNQf54Vz/M1EFIkeofDoQbf+fUon/uu\nMTT2EJ3BzYCZMZR53x65nakdUT94/yDR5fhuoYHKVVmUxPycmJU8FvqlQxsAgQDpBsTdj8h8qi0+\nhiNldUeTJ2Eorc7PPvJ0RV/3SdsQ2CBLR7ZAkueFKw7jR6OBFw60aDc9ND0NM971wiO46A1nZTE1\nW2MjvW1NfT9bUGBZ+8XACKirgICrPetsHF1toLpsOF2z2E3u0Pt0tfcZcDhOeuMmOfFT12Pm1911\n5V9avd68nUg2flJvKyB17p3elnM14iD0tp9NqPXifB7H0gm4cyzxhfOWcsAOK6gGu6QgM2OFphrK\ntFYEUlUxy6J2RSwozFO5aorfbrPuWnmq0DeW04TsWyj4XbHyJqofvoL23Zcx2X4+lSQKdWCGyXok\nVFwkMoCrdXwATBKFGiE1Sa6BhnEmG5FFnTv5oweO4sqtr+GOxg58fNlOaDLElI4X/vHXZ7O0WsHL\nzz+2JstVJliVezCqlNUTbTZ2ZDik1g4SKoQCI9EmgQz0/Dnwq2/inhno0rrRM2bZXGWSky6k/ny1\n6eiG9LX+bfYypRbqystPKXbH9X6fCUr8Guuu2TYpMCQp+IBoxWaDE/zJ55ax1/MCb9h/Cw597gN5\nyRny2LoPSFFwdDADtXGCTRB0jCtPHOKwU+6j8hFEMIOQChw+D3KOe5brin6CZ5rg6v+3taIodqV4\nxU/Qn36W4H9BUUH0hkuKJcOewFE3PE4jwiQquWvCmVB6obQ6oa0NVEoKOuy59BCOGbmZ/yw3pVk4\nBmJFRwpKJzSSMimBEy7/OD9445lEMUomaMVhJv0EvigIyVH6rEIpszHxI0zpGhFatXGcOkw35f9T\nflZj7eXDI5Zbe30ypfrykcNZi+loym+mrq/7uuvr4rNHpg6cGW8/qMW/XXYPV980jIUtZzSOLAhq\nDnW53MxchxQHaYWKMizk4Whc+qFtess3pNh4UuBT8LiepYi0ROS/ReQGEfmNiJxcf769iPxcRG4R\nkXNFpFF/3qzf31J/v91TuwuzCy1XAPMZ+9EhLN7zE7lwN9TTr+Cx8MSeT0LCCHhLIBFIJB8Rg4kw\nxLh3DOgY4ofopBE6jYKLR7fmlN9vlr1Jq3CiOMtTy3kpMmDKIef/NVEmKWwQMWMotWjjQYxGzEmf\nNNsM5Vpwa3iM647HW1Ye5fWTORSPNr4KoZKIEWjLAAe/aIRLPrwbKh608yS2th4wcESaWmszqULR\noiPzGNBV/Mlnl87seGYp1mUa3gH2M7Ndgd2AV4vIS4HTgc+b2U7AcuD4evnjgeX155+vl9uo4IDm\nAb/j4R+/D2cVSRq5tk/Wb4obUmDCtyhdgQCVb6I4Ais59PcvZY9lR7DCD/Hau5/PYKdD1QAbgEYE\n55sgFd7aJOeZbASSCMlP8kA5yo0rbyaFNuYdjQqSc1iYOnntY7rRxihjhRG4e2X2W5sC944a0XnM\nNXPH0AxCcFTOSAIWHSYrEJSYmlz4gc1ndjCzFI9rLC1jrH5b1P8M2A+4oP78q8Dr69eH1e+pv99f\nNiK+LKXM3cRSsPBV34Ydz6CSVwIgzuPliU
c+oo+IdWhVFdEVDHU6rPILSW5TnuXu48ebXsB7bt2E\n0cFtGQ8FyYxKBA1tNCacQiUDgDJYJhqa9bZ/v/IOdlmwBWM2iRJJhaOqL4m4sUTtNwBaCC1JKCXb\njCgkjw/wtn9Yia8p5/yM3jGKSU5XeVOCCoMTgar9EJJA5oqw3VOMdUrwiIgXkeuBB4DLgVuBFWY9\nN+luYKv69VbAXQD19yuBxdM56NkMJcd8JE0Ql/4LnZv+gdbWLwEMWY95rVcHVtEyw1NQFU0m3DCt\nAobLldyWtgeDEJtUMonXgEcZLJt4NZo+YhSIRJzBRCgoXZMkwnuu/BwH/8Nf8IUffw0IBIOW5SRS\nT3C2f588CTzawcvlRFfc3kI0UIlx8GkdDjp9jHJyZW6VnGFY7cskSUQHVWOIcsCxxdByzj9pCN/z\ndTbui2GdjKWZJTPbDdga2AN49pPdsIj8uYhcIyLXPPjgg092dbMGwQLeGhQsorHo7YRn/QWrfvf3\n5LxzIuoje8PzJZhQMWRtGreGYtIkESglt0z6eU32vWtvXvzgIaxqNdj7gYO5cWhznCVKD2YNOiFh\n1mQi5Km7VyGoUfmSQjuIOZxFVm0ywcX3/4wJVlAKJFx9OzvUqtmX4JlDmFqa1fXUFSNhvHKnDsEc\nNy81NN5LoYFWawmaQ/8bBM3oaCSPSpsz3jLM6Sc8i9efej+pd8lu3BfDEyodMrMVwA+BlwELRHpz\nyq2Be+rX9wDbANTfzwcefpR1/bOZ7W5muy9ZsmQ9hz/7YAiTIpnxxxkD276F0Gwi1sDjCc71nuQm\naUqWNXelmKvWaIeMHTDpoALOVZxw13ZcOr4d5211LSOrbsVJBAqa0qQVjYIu3Vj2GJox1w4GhYki\nd/pE8UAgtRxM5krBAQZoaF2dKKnumCmY6mCuXZa5cfsZ64LcYZRZ6dtAhaPCUoeguYB958WBiclJ\nTNpEoKUVyWbWYK7O6OcHbNOEj515LX/2xVF+8OHN8b5/pmHdsuFLRGRB/XoAOAD4LdloHl4vdhzw\n7fr1JfV76u+vtNnQJjRDqKTDYBImnSAqdH74fBq7fghvJaJ14Yh1a/MEJMeLVOj1iCRKvFOcRrSo\nSBaovHH25Av5yma/5vzbSo6854WsXLBDXUQsRBPUNQgp32hTD7gA0UEjBbyGulZQ8R2j04pYmuCl\n57+JL//uIpLAP/7uYjQZFdrlgZjSEbP6/cbtZzweDJEAYiRR1FpAgeIJzuPF0XEO8UZzwVYkbRCA\nyCDOjW6A0RqdkOtDJyVSBM+APdTdkxkfz2zEuniWWwA/FJFfAb8ALjez7wDvB94rIreQY5JfqZf/\nCrC4/vy9wInTP+zZCqNBC3MwEBuYN5r7LKW15Hg0htogkqt/UVTrFriaGzH6RLCIl9wpUjrjZ/5Z\npDDEd9s78JXRLdhz2SH8fnAHRJSAAgXB2r0RSE2YJWuM6tHhTGmJp1oRaCB85jtf5ucPXc87nvVG\nvEAha9YKrm0gN5on4BOEUZMCG0QUR8qzBqUXZvnIefexdDxx9e0TMP4wkiqis7rjq7kBxmw4c4gr\naeoAF31kN5YuH8VTzvhYZiv6veHTgSl8seos6+yYgSiijvuu3p/5O72Z4jfvwKvRsay3HHztXXYz\n0E5zl44KogFHJAWlocIfPXQERbtNe3CY+eUy5i2/k7s3ez6NFOvO3id2HpNkqjNcohEFE2VCGxy8\n1Sv4+F7vxLvVFF1Ti6rX9lj7eCSyamZ9tMyTZHXLK6liIhQMYUBk/08sx4oFJFcSkpGcw1tippnH\nhQ5RrG4EbfKdvxlkxHfILKJP796Vfm/4TKJXuRxxlkP4+cPEuIf5L7+CsOnbMQLJe5wzCt9ti8vk\nCmIQkmB1mUZsFOy28jBefNee7Ln01QymcebbQwzoSlYVIzy0yY7MK7NH6eSJ1246E6JTmpVDJZca\nvWHrvbnx/pup/CQdm+D3VQ5Dr/1A7U/BHxuCB3OYeqIYCSVKxDGJC8YQHQ44+TZuGfeYj6hEfCpI\nzlOkIovbzSAFeQ79NHHWooieztidvOvMW7DYqJ+OG96hmg3oG8tphEnIXp60EUmYVAwptBhFDLwM\nkmKojU+XzyYhWmR52sYiROcTymH2vOMwjpWbaTU2oRMGUQfLWlvhU6CwSJQCQ/C6FiHG442x/ivk\njjsaglkgWIN/v+9HnH3kx/nt6B185r/O4RlhS8pY1S2Gqwk9+vfOOsBAXSSgFAaFGmaOUjwQKYst\n+Kdzbye0x/Fq+FxzhqOqWw5nbqiJmGlD1EgaOHL3xXzxhJ0AQbXsxyxr9I3ltCKRyVCbROnkKY0l\nnMzDywS2z52UNo+pBGBOAyYR0cDnli7ibx/YksmBAY7d/AYuLp+dqci0JJrPTEEutzoKRse1yBpd\n695rs+Zl7xg1R+UgSUm0kn3PP5a/uvRjfHDPP+dff3ER3ntUldSVSFibv6yPR8IUXEXCEanDMjTo\nuAbLzfOeM39LKwjXPbgIHdwKJJLcJE2NVC4x3ogzdmyVzJblFJSK1Cw56lUL+fy3V2QdJz9YE9n1\n8fQORswwpDZa+cJqkoDgHaaQbJDC30YzrMTRxMh93h1X4mnwkmWv4RcLvsvuKw7noLvbtKUFoU0j\nKSIFKtBMFaVv4Uyz6+HGECtqOrEnjlICA6liwkoKcTjn8VZQYbz0rDfx0z/9N5brOIvcME6EyhIF\nmVk9meI3IpbsJwRxJJSicmgRMWkgqjQERjBufmAe2kigrdq9L/A2yURo0lRHKzJjcY4upZ06aKYG\nz110H0d/8iH+49SdMIk92pG+b9n3LJ8SCIGAJ9SGBZvEuQcx6yDRKEOHMuSMd3BgEtlm/DbMDSII\nu9gtFNLGyzilN5pJa6ahgDOgS2hrzfU2lABNzdN5H4bwUuBqVcGUEr4V2ev8N7FQBnjl/3sroHhZ\n7V+kjaeD9YnDFE9AQ9Wj7ivJceJXfeguJt3mmDj+5nXLgczFiTRxVI+52qcCXXYkRalCmxvv3ZQL\nP7wjA5rqh7/Uaph99I3lUw0RxA/gWcrEZbuxouMJ5RCuiqzyQyTms8cDr+fhKLx02WuZ1MTv3DZE\nhlARnAWSKzG0p7Y3nZeuI1JoThAlKhTFOaEKyguGd8FZ4Cdv+RqmghiUEokYDZGe3HX/VloL4mjr\nKpwEnAbG1KGm7HN6h6KxEHUrMIPTLizpTu5MHIVuuImeGLxul5IqjrLJYCI6j7euZnr/HEPfWM4Y\nxtmFoYM6LDx4GfaqlahuxnjYgeuHduRAvYHJeTsy3FnJAp1kVTEf8Sug2pzKRxKtngcAT90MLUgD\n192OOa5ddQMvuuAwzrz5PEwMVaNQj5c8DZ8O+rKnH7JZacgQKy17bCf83Y2cevFSvCVcUeD9Jpk+\neHDTDTtUgGRYVYHBt65tsP0SrSVua6XKurOsj36d5YyhV4p50zuRnd9Oe9XN7PuDc5GqogrgE0y6\nJ
s1kVH6S0g+zoFyJSiDRWp2JniFEWnibJFnFYGqgTfjqG7/Im896F1cffx4iWZfGzAguyyv0b6nV\niJbwVGgZePVnVpJkONdSWsJrwOo44YbE1Hv/+x9oMCGeAkdh4ETxUXp0ffD0fSj26yxnGQQYR7Gd\nPk2bnfnfiXm8vniIMgViMsxaDESHiWNhB4rYpJJhSg9Ie8YvVG9tcEpLNFO9mefK3/0XP3v7NzCX\nsqFUpXC+Jwm2saJrcszyw0NVCeIx9bhGIskATV9lDSYdJLkNbCjX9o8MjvvcLbTU8b1r78cJeHPE\nkLuQujJwGzv6xnIGkIP8HYZwONeikAHO+OHZXPSAR2wMBBwl0Vc0bJLRhkPCUpxFglY5kWNFZtGe\nARgCTokMIHgOeMZrcKniSzd+DUlGSJ5SEuIcCUO7sdQNP0l56vAY+zbVkGjK7aZlNF7zyXt5zSnL\n+fgxDdrqQEqSqyji6pXZhogGCmsoN77nj4doTyzndadO8qxtN8Op1soXeRlvM1ghP4vRN5YzgNzU\n2KRCsVThUP7tzedw1dv+nao9wiCBmHKmOxHwprTKBoUqqktQcYhUOJsZrkOp2/SCRSZdg+/d9UOq\n0MbjuWL5L3nJN45m33OO4n57EDOjE7u96YZ2b6wuW9GUG21O2lLL2eyOTMkJ65ovSpsgWUUbcPUd\n9eGv3865H94WpMlJXysxU7w1EIwqdNtIH12C9ilFnUTqtbJaYtmyB1juns/3/lZ47qIcMDIU61Y/\niDBV2G1jRd9YziAKQFyBWEBMWRUiWibaCKmltdStw1mDKA0mg89EwsQZ90C6/Eeh64FoFnT98BWf\nJSXFCsfiOJ+9vvmmmsot0VX+MyBKllAVha6Jl7nG61a3nkov7RUxEpXrGk5HZRXg+L9n3UFTjf1P\nn+R9X72Ng/fejMNPGaeiQfDgauOUNcK7651BQ2lgJkRfZZJf6+DN0V51F1QBF1cRXQsRIWmq99nV\nwmlCvyS7byxnDPm5nJsGhZLkhFvLe2kWFc0oFBUESySXqOqzIpaZgcTcBkmfPFK5MA+s0czm78Lb\nvs/33nQm+339OBKeslMhkr2SBJQiVF5wplRmIJmNe87A5fKomkOISC42d7WEbrRxggXUlM8fvxMS\nHSlWXH/3pnz8GwapQ1emUafEKJzIjB8HldXttSaKs0i0CYoF2/G1nwmfe8dims560/PZkPidbegb\nyxlCpnoVkgRe9M3j6NDmzPP+kW8e92WuOPpcGqMDlAGCCk7zTRrM0Hr6pzPJrPCYMJI4gjjOuPZL\njDBA1ZrgpB98lr2+cxz/+fD1JBwNFRyaFbcFgkSgws8xGTQx8BYJlqfMHqnrDyHYAPfbJA0dYN/T\n7+Y+MYpWC5GKdjWJNVo9o7N2t5O3mTwONakzFU4dTj3qB5DCI2pc8rcL2HXTzFkgIr1/fayJvrGc\nYTjgF286h6YOcvbRX2RJYyEHfelIynmZUKHtsrBD0IThKTTfnG4mmRUeE/km6ugkjdBkj/OOYXh8\nHttusTXtMvLiTXbEoXx/xbVctew6GhaJgFhurCMppLnhXRrQroW8qiR5Vm6eKMrl9/6C+2Ulm9Fi\nv0+PE6Xi5POvotJEkAFaFI+3+hmEZC0nqfkFpEJcJKUmBz9nFDWoCDm2LoL3c+uBNlPoByJmEDXl\nZU2plm+6wiqIimcQv6JDXOypSHjN8hPOFJ1lPdgGeGlieBpElg2t4Bu/+joy4TjgrHcwUUzyk2PP\nyTGvZAQvmctRjOiEMMv2Zypyp3SGAC2rp68+MzwlKamsohpo8tqLjmfe0ki5c+DZfh9+vfJOWun5\nJGmhCy6n1C1oTfzRGpnnDYXkEqEqKBsRSy28VaCRd75+U669TXjhdkrhZ+95mQ3oH50ZRDcHmmgi\nQGHQsiZX/eUFJFOaw4O0yzbRCTZR4izljKTNPmr/Vkw0UwRTCpQkMDLUZOVIxQ/f8hVEApXAPl/+\nU2KKVJKIFgjraDDWK2LWlWzttYWuuRZb6y9MSWzXcLCGZnclmZ9U0zh//p2PcNjZJ/Dr6lY+fNGn\n+N4bzmH5IiUl43q7Cm3eRrXp5zPV2sqDaU7smomA62nt48YB10NT/tHwCNE7crSy3RonlE1So6Ty\nHiYexHC8fJvE0Oy6vGYl+p7lTMGgR0ZhfnWtmwScCK2VEStLwmYNRBxusKCquylMutnp2QFviU4A\np0YSIfpG1m3xEZ8qDvjm20kuoUsdh+52ID5FfrbiNyxqLuDZw9uiUuBZ7cVpXUTjzHIvPY9eBK1E\nLNOT8KhmTqiz8nmJzB8fEUKvTMeIGJ4KoUFW3HR4IgnM4UUyEZDksqcgHsVwvsEvx25m62ohf3HB\nh2gMF7z6nLcgwZG8x6rIpA8EG6FsrqK99V8xdMfZSC0CZ3XZVzcuCHm9gtaEFVrLjaRMy+fW/3yb\nq0AdOEXUYyhIREjERhunBc51uOKUranMkIYnWqTR950eE/2jM1MQaq5L36OFBOuVlJxy+Ac54iWH\nstnSQBCYFEN9JBHXyKTOBqj4XBPqXC4xMcV5T6MyCivoBMXjGFxk7LnTbrzkX9/CHotfyKKREXY9\n4/WUNonqJBYnMXJMUCyztUPt2aU1jaFaLp/KZfCpV7qjAKqgEaxLY9utPlBymqn7LpIIWfEDI9Xa\nR0Ztp3I6imUyzldv/HcEx7UrfoPh2eOCYzhm51dz77x7SNZi0ntCE7xvZE58FQb8CM3lb8bCA5jA\n+PZHkcIk0Rm9R0NtKOnS7CG5ZEyqLD2Bz8utJ6mGUddu4UAFdRUiCfAU2sBJQdF+mEapREt87T/H\nEDMafb/pcdE3lhsA0mMer0tqgBcu3onj9ngD/+fod0OpDK0wBiZ9Lh+ahZnJJI6Oy0Yoollyt7bp\ngx2HiieJ8NEfs07eWgAAIABJREFUngabFOx94bEcesFfs2lrgDvG7yM64bXfeDcrWEGsva0idb3t\nbIgBsCo/XERw5nPLoHkSDrNMcFY5n5eXHBdUssnJRBBaNwUYgicQcShFqnAW8VQki0TnMcum+D+X\nXcslN/4Hf/mtk3nR/Ofw8ouOo7DAv955KT846pv4IjEQFaeri6ua3hPTKCu3fS/tLT5G6GwFroMW\ndxG3PBF1eR+6RfpGxEmFSUmWoPWYWP7rEqyHVAi1BIRKQiWSQgenTZIAKGqBo/eYJBXDfO+D8zEZ\n4Ji9hkGUJLPrgTwb0SfS2MCY2sVRVQkJiSCBd136CX42eiN0p23qwc2dLHJQw0gE9Yw1EhFjJA6S\nbBR8E1fBj4/+f7zqa+/mirf+HYd86Z2c/46/Y1WnYsvGYpzkh8kkMGCCScSZw2ozKAJlPbFOJjQl\nTokJFzU3pNRLWK/CNR/xAFZm/XYDlcTLzj+WjjoGU6TTgJEVg0wWE0jTiJIfBMGapNAmoTSS4Qjo\nH3iQCRXmJrG0hIFlR1Ft8iVat56b90EMRxt0PrgJsKKug6
zyBMSK9WvGrsMHKmVvXyH3eSuOAS+c\n/tYWz9w8cewnb+ZrH9qFYBCogPXc5tMAfSKNOYPuFaqEAi694ydUJrx4h+cBCQuKSpozhhLqPRKj\nEwrKoDg1BtVhrk1wDTQqdAIvueitHPqsPQniefXuezOSBnjDhe/gjBvOIYu+ZU55QdjvX/6Uu9My\nINKpY3sNSxREmpLQOpJZ4XG5WImHrV0XyKd6Ai+0MwUIpThecu5hvPjCY9jr/KP4yRH/ynVH/Cud\n0CBEx9j8cTQEPIrHKADnJ3Dqc4xRmkT3h6yLELWJ2Xxwo5QLzkPa25MWnYsW4yS/EtVmNmpmmLRr\nUuccolFX8ciY7LoeeHA6UNOqeVQdScAn4ZCXGd/5yTj/u8zzD+9/Lg2ljj8IJjNPPDzX0DeWGxiy\nxmvjkO32piPK56/+Kp6ApYCb0QLmJ4/SCx3vaaZs6IqacTu6bLCQkomRPDG+6PffJZnyjdu+w+4X\nvIHClxy84568+Bt/zB7nHkK0iiO/+W7MRTYPi7hHJmmirCKBZBGwq5ddV6duPEWtrGnAAmlwR7WK\na0ZvA8v9R00K3vfvJ7PXVw8nMIgfN2Iq2PvfjmGPC44maJuY828kb6gISRzJZSGQ6JShylP5iiI9\n+u1jJLxTIGI2TPQRWr8nDf+U0W1PIG1xBjCIGogYRrNuOvCAx1nB+tyaoh40h27MCswEcaCVkQrh\ngh8b7z58hHd+eYx5oZMF1UxBDJlVdaGzE/1p+CxATvjUT3iyLEslxgFnHctYM6HtitBqYCF2Z1qz\nG5ZTL1G6E2HN02dRMF9Pqclia7XSZXuswfBwhViHZMJug8/nhvH/JYnPTXqSuPCos9iReRx45p/x\nwIKH8CGQTPn2IX+HU2XToS0565df58/+6DgeZoJ3ffX9fPG4T3P4F49jYvPIic97G5/97dnIKsMG\n21SjQ/j5QDKUKme+XUKiIE7qdE/+Z6J1HFUpEjSipwx/6N4xTHICxyQzjntzGBHSAoplf4Lv7Ix0\nFqJ1V1M2VoZKhZMnfpYNq2WYc4Iox3kTokY0oykR66zgvI9uz5BPYJGmhrocwZF8VgjfGLGu0/C+\nsZxFSOSyHHCskpJ9z34TYaBBShUijqLjqJqKmsPNmvbHP4REEun1k4uBidGKDsUoQ26FDJa10sUp\ngYrK8rQ3yny8dSgrB82Ex0jmaK1wVAsrnh125Jd3/RoZ8JSV8qmD383mrfm856rP4XGYM8SyWByA\nSp6apypRFC3+4cCP8s7LP5qVNSlywTmGSMKZ0fGBYAmxrKsugDMjikdFEYygnkYySv+HjIxHRRBL\nCCn/ziIBz8CtF1Mt+XvC8iNw5WKcdACfS4bMsbZwuNWZ/ceGgiVUsoidpooBWcnm81Zxz4rNuOzD\ni0maCBIRbRK94VVAEiJ+bjyInwL0jeWcRC5lcXVxclsEs5JXnHdsTlREhxUJkqcpkdIZUQKFbvhz\nuDZUNLfWmceb0PXTDOgmzoPWVQFqyFrxP7ExTIZ77ytSJhVxhliBAFWq8C6QiBTSACASCZoz9N45\nMJ812dc2NKKIC1jS7PFZwJzW5Tv1InWsM5gSnWRCE8vspPaY3KIOkxKxgLoSpw2UAiHmuYO2CO1t\niQO3MXTbF0kDd1C0nwE2nKf/VHhCXWaUC21NKgSPSgdnA4/cpCVMPEkrxOXSLqpJzMZ535Gbs+WW\nBT+9bhXv2nchSScQBvOxnFLBuzEaSugbyzmHbvqmVyWoUDmtScEihQVe/s2jEfE0tKQTHH6NfvHZ\n4xfkabfrGUVB6ux49/vViM4IKnUsQuBxi7HX3E+jq0LIGgXpsLrgfE3kqfQfgor1NGd0reF0jefj\nH+fuNjyrz+zae5EQa+GlZOj2fwKbR6LK03GbQAhobSCNAnFtnLZq//IxjpFWdWepx6QgSIfvnzjI\nLSscOy909bZrI6mr7fHGnL3oZ8PnELq3v/ReO0QEh9DEM5Aa/Nc911E4w4lizjDJJS+rMTsMJUBX\nnjcoFJrLiGD1Pk5FqOV363nuOqx9bdq4KcZwLW/vkYYSHi/LvNpQ2iOGY3Wi6vHR3cYfrmAI5ZYA\nVJIY3e5dVIM3EDf9J8xFcEJqPIxLLcQczjpAQqWCx2EtV1fkpJoIXkc5Zr/sp263YPW4u6Vq0j0h\nfSuwTugfplmCqby42dh4vNUErC6w85JtWNQeJpCIEhArASE5RR7jpuxj/fBUKxpWzYdQP0kjjaAu\nMrnFpzAc1dCPcm95tRjzJSKx5gdoZPJk13nU9VntuYekmHpKgze9MnLUSxbypcsffPRQzex5vs4J\n9I3lLED3ms086as9BxGhYxUmkS0am3LusV+iHPPElAg6iK9yIbP1PKj+1T+noAOUfpLmxLPJKZwS\na/2Oya0+gjbuqXu8m4h5RJuoFjhrPOqqzGV/8eIPDuFFuOykIb7xkxwDfceBm9LG1W2hGz7sNlfR\nN5azBN0JXncKm8N4jqY0a/MZWBGXw/yKS9/8FS486u9BPGmimBLBWv1K5xIj+UaGXNIDIm0EpT14\nM2YF1cjPKef9CLEGUm1JXPx1Vj7ziFyobop3FVFbj7rOzJaUOPT0SYbi7ZQY3z9piAIlYLQkYs7B\nHyyk7+Px0DeWswhTEyBd7ksFAgUiwjZ+Cf/1xvMY64wxnyH23+Jl+EFH9ShTM2cepw2sbzRnHcQ8\n4jpksosmQrV6UmAFqVgKKOXIj/FaUG75cbxTzAqCm0T9w4CSpFxjvU4c3/2/Lc75yC787PclCerO\neAMXHist1Mc6oG8sZzFc/a8bzyy9Y7/zj2XnxtaYeK566GeElJNAYt12vzqZUpet+NTvzJh9MEyH\nQXJMEkBsdYrP/DhjO/wp6sdIAu2B32PWIBUPYRKhWgI4SIr6NqEm4LzixEG8MwY0sfc2gUY9J8lC\ncY9MWPXxxNA3lrMIUzPiUz/zQEBpJOUnR56H4BkahwMX7Mu3j/xnnhueQzFQczZqmbtQzMAC6mJN\nBdbH7EDu1BJpI1pg5nPxvETEqAvPDXGrkFr3p1kuoBz+GWnwetLgL6l8IqknqOFSg9hw+Ik7OOen\nE3gRbnrYEXzu5AlYbS2lH9J+kujXWc4J5IrBnAztQExIo8mDxRhD1qRJwcsuPArvhNhJpGAUKRB9\npIjNbDD7mEMwkBIsgJtAqm0IVZOy+TCb3vL3rPLDDEZjkdzMfbIDauNccdISgmWKO1fT200VIKs/\ngFlI97eh0a+zfBrBTMksjYY1C2xokFR4NoktIglB8Cs97939BGJhFKnAtJ17k63PJjMnoU1wbdSG\naK54FZ3m/Yzc9w4e2v6vGNZxgkywTLajqJbz7oMG8UwiDpxbLZvsnFtT96dvKJ8U+sZy1sOyzETd\nOeJweWpuQKnMY4i700p+evw3ufO+OxETylDhnNBURZxmpp8+5hAEXJVbJKXDxJKv42iwcot/pPLC\ng9t9lPGBh4lOO
HrvRRyy+wiuGsKo2d8fp3C9j/XDOhtLEfEicp2IfKd+v72I/FxEbhGRc0Vyc66I\nNOv3t9Tfb/fUDH1jQbfFQvFoTcqQabVscIAksI0bIppywkuOJnaM1v2Kl4A159G2Am+PVlzUx+zD\naiPntIUzQ3Ue1KxLRsI5x1b3/DUTm36JZc94K2dd92tC2SVvcrjoHulR9jEteCKe5f8Bfjvl/enA\n581sJ2A5cHz9+fHA8vrzz9fL9bFeyES1GVLH6aXOjmd1GW9gqiRzDFVw/THf4sfv+FbuiptYRcPp\nGoH9/i00W7Fmg3aWefC1XpDDrINnAGEVd2zzYd70gh255siL+PF7X0jVGMcVEVHFNfpn+KnCOhlL\nEdkaeC3w5fq9APsBF9SLfBV4ff36sPo99ff7S/8xt55YzQaZSSLI3oMZrqZUEFFcaNKkwhUtbol3\nE7zwkyPOo0PB1Yef+4i1Jq2VBlEMrVmOuiY4oWudrX7JyfQjOlnrOAuFJiqnNYdnppTzeEwVYRij\ng04mWHov79v1L7l+8oYslCZDmTzDe2ya5HT7eCTW1bP8AvC3rJ4nLAZW2OozczewVf16K+AugPr7\nlfXya0BE/lxErhGRax588MH1HP5GgPoQR+lWXcLa7Afy/7d37lF2VfUd//z2PufeO5NJZvImZigg\nVQtaikgtotWq2FWoK1INAUFBWopV12otbS2pbVltlbW0a7nsC0UKIhUwJFhBqkUEtNXKSx4lZhII\nFggEGJIYJsnc19n71z/2vnfuTAIOr7l3kv1Za9bdd58z5/7Oved8z3789u+HYE0J7zyvzpZTqGCM\n4e7T13D8NasAaBTBgVlRrLF4r6FFWg/RwFvZJr0YvBj6OuaFpopn4sVRmJD2N6aEj2v7Y/xPk2M1\njDJ7l2HZiRZlcuOw0uBvT/gLfvDx/6DA84Z5R9PK5ujjUI1IytL4cvFzxVJE3g2MquqPX8oPVtUv\nqeqxqnrs4sWLX8pD719IhkHIWj9VO6Vuiw7RNIJ3nlyCy0gDx22nreNXykeS1/q45IQLaLg6knuc\naWBNBfpCfEkkpLXNHfQ1PdXky/6y0YrC5I2j5IsQqJecepah3uFsDcThbRiMzPJx1p78eW5YdQXH\nLT+SAeaQxYm+XEKiMTvpYZp4OZjOt/tmYIWIPAx8jdD9/gdgSCYeY8PA47H8OHAwQNw+CGx/CW1O\nPAsigs1s27+uLIadOsY/rvgEOqCc/d2/oWwrvModQmZzKBeUAaUc8mkTUtwaVQoTR0tTF/wlR/E4\ngZKD8axM01gybWK9YtRgvJB5IfeKLw9QmH7O/upfskTnkTuJKUgCaYRr5vi5Yqmqq1V1WFUPBU4D\nblHVM4BbgZVxt7OA62L5+vieuP0W7QXP9wMAVW0LpVelWsB8mYehzP+suorTh9/Fd079Mg9v24p1\nBdpsgDqsdyGqeebxotSzeKNOJ85t4nkjGDIPTix9RQNRqNsynhpeLHmjn6JaYbxe5kfvvZK7Vl7N\njWdfhIowzwzFpZGJmebFtNv/HDhPRDYTxiQvjfWXAgtj/XnA+S/OxMR0aa3WEBGMCBUbUqpmxoJk\n/OHxH+Yd15zBVz/099y08irEG1xWoqF1rDYoNZSK8xQiFGKwyV3vJUcJrfaSg7JzeKP0F020sZuy\nqyB+N4jn8P6DWXPqhRgsDk8mFhEXf9/U3e4Gz2s0WFW/B3wvln8KvHEf+9SAU14C2xIvgFbLEsJc\nalMMuW8gYtnT2MM9p1yLSshjU1AwWJ9PXqmxu7YHU81pzFP6C0dDMrz4NBP+EhNSbEDTGkSF3Dn2\nlBRfVzJfxteVZlZw1C8cxSGlg6lR0K9hADl6jiW6RJo628+YvLwtIwe8KWNVmZf3h+Aa2qBqaszb\nNcB3z7mMtY/ezEW3XM744B6sOuoKGI/VfV8eXgxm0iqRMLPrjJ1Sf+AQHixmUiCUQipYrcUVVy7E\npRQLJse4JtZ7arnD+BJ/97bz+eWlh7IkG6JJnTn04SWE59N40LpAeVpZHhMvB+lb30/pbBCaVmRM\nk4MRrMkZ0D5qC+DCOy5mxfDb+NZZl/A/772S1Ud9FCPSDvlGMxzJqMcZR3COj6G/ggcoIaKNThLK\nA6FBKh0n6WPXWMWFMGpSUHZVQMm9YxxD02SoGlxR4CrCrj2eT//qx1HTYPHcOSy1izEYBlwFbXpK\n5BidyF9U9iDexOU6iZkmRR3az/F0PhEnZ0Ycb+6mLx9gN1VKTaWWeXY1d/FP37+Cm3fciceDd1gR\nkBgCTgURxajSNNpOGGZ9iIx0QHUTFQpLCKUmgvETPqkmhsizMXWuF8GULI2iQdZQXF3RecKcgSFu\nPvGSuJ5bKbsSku39JTYBi0aHocRLSYo6dIDTegRO/oEn32ilrIJTT1n7cHlBhX5Ov+w86v3wZ8ec\nzadO+DOMzfBG2repxYc1y1hyL6CKFw1O0RI+ufuP35nBxBijRg3iYy70sEgbQxMxVSA4ode1ER4+\n1QyfOypL53LHyjWsfs0HUB0nw5KR4yzQ2LuFnkMSyi6Txiz3U57ztooNzHBzC04UYR5W63z/nKso\njCdTeMO1qyg9kVEcVJBrgSIoFqXAGaEgo0mDisvCwkxthnXMou1cQvsrClStoeQduToKkfDtqEcJ\n3e1iu8csDCFQpFHCi/LN0/+Jg+xCjv7q+7hiww2c9dqTg9ekGuo4+ryH0sRtudfv2Dvp4Q84klge\niMSbLRNLQwoyDS1Ha3K8CbPot+/cwN0r19IQwWid49eejsOS7TGUfYna0Di+ZCm7nHr0FczjrO2B\ncC+3ksuJF7w4asbQ55s0JMNZh2JYdcQ7+fq2/0Jo8N9nXk5dPHMYogDu+eD1qNbDUlUfUhrPcYbC\nFs/9mDkQvtweJY1ZHqAoilcHYrEhZRZ500EuNDF4QkoD25rM0YJxU+PXLzqT6869iM/cdCl3jq3H\n1mF8riNraCshQgw+q3t9nszAnT69z3n25tmzBxOfPPrbomnAiWOgEKqZkO002MEyztfRvODm936F\nPdRZzCCqOWWEmigWwWgY48zUIyoUxuGBks/SANkMksYsE8+JoljJsF5Aw2JHjRMLeVxOZ9QH53QB\nkYw+cu7+6Dc4OHsFF5z4h5w+fCIYodRUnAWpg6inpIo2J4tlK1RHEVOxtpKwwd4z5zopppzD0Jiy\nPRDSBbdCgLRmjWXSsScb0cp0KShFey6/fVxVRAhBRtq5NaHQJh3pFyFmalccfYWnIhWa1iK50pjn\ncL7KykNO4q73/jvzmMsyFnDOmtWAoyph8idkyNHQtRODClhsmC5Ld2VPkn6WAxQTc/qEAOw2SINM\nCFlZm2RiKAM0C+7Yfh/XPfj9EMdD4Y+v+is+fvwHsaMF31x1aTiegWMHjiLfbZGag2bRFkmvHsRj\noxCFtF2BqQ05mZBDiC5MITJPwMay0SiO4umURxWPSli6Gc7HR7sn9pF2KI
oJWuOsIYWNtAOWZFkF\nh2OiRSoUKLevuopxp+juBped/FnyJ+DEJcfjd1vWPX4jDQq8D8Mal6/6LGWx9BFntdWH1TiqNAll\njzyrb2ui+6Ru+IHMvnqj6qNIaCzbUCS4xyihS97EYAnCJl5QHKI5DVPlbWs/iDqPNIVmxeEdWJvR\n3K3YOQUiOUYL1JqQpTC244LwRVETh5IhFBh1FNJPydcpxMauq8PF7TARGzJMWAWRVfHRF7K1nwGK\nIEqAw4TcYBhEFKeCkQLBxrDLgroGlVKJposi7z3W5Pzdb/45n/r65/jW2f/MO6/9faoPV6nMr3D7\nOWuxaoOvJTky5Yt2rom1k0M67buDn5gpptsNT2KZeHbimGYoE2fQw5hgSIRmg4O6ZNQYx0g/Hniq\n+SR7KPjIV1YzWBrgujM/jwJvWPsBKkVBPbP4okkprhQSsbCnhPaN420FVHC2ivE5aBmkBhqTckWB\nczGzq8cEwcajonhnIAvyIz7H0KRhSxhfYNSgUiO40xusVxCoFMqukidXS45F1aNSRgQazlPeCW9/\n9bFc8I6P8e4vf4xPr/gj/nTNhXzvw1dgKOMEbAGSCeo0+PlQIKQ4d7OBNGaZePG0hBLaLVAljHGq\nhFjtTZPRELD0k3sPupuD8oM4Mhvmxt+7gi+cdgGK4TO3XcacXSV+cNo1eBVsXgZf4reXvpPzfvVc\ndKCBWgVVDI6Sy0N33ARxExlvt3DHXRVBcOoxsUvuULxAboBCQ/ddGnhyKkUNLxnOQMilDWV1ZKZB\nRoNq7jhj2ckUtsDthOvfdwnz3SAN9aw9+ULGBzzf2vRDfv1LZ3PTh77EGxccza0fuQpnyuwxUIjg\nLCFKUx6d0JOjyX5HEsvE8yIM51mMF6wWZKFxRubAG6GipRizGyoIK676Yzw5f3LcWdz6uxfzQP0J\n7jxlDVpVfnjGFXxzy4/QBoyPZQw0lnP7qet4dfnV3HLqGrxXxHvA8um3rua3f+k9OFUym1NIQVMa\nhFaiR9RinITUwKbC3avWYJxj99g2tOGoFDVKT9YxY0qB8oV3fJamlvnBKd+gEGHt+m/zo/ddTTbP\ns3nHo/zn+y7iO6d8kQtu/Fd+9IFLuO/DX+e2c/8t+qSCkQyjYd69jGJMEbr0Gprg9gBdI78/k8Qy\n8YLwRmiaCb9KyRxWC5qm1E564QXu/NDVABj6gQFeVfkFLJ47zljDLmnygw9ezqmveRf3fugKbjjj\nc1z50H8woCXKWCpjJZb9bCnfXnU5bzrol/mro8+g6RtUxgxzd+eU6aNe1MA7nDR5+7zj8F4p73R4\n9TjvOedtZ6IWbnv/tTDfccTA4Vy34ov8wa3nc+ySX+Ou7Ru5d+W1ZEOGspb54aq1HL/k9Vz8wBoW\nN4f4t/dcSIkhVAxCicxnYdRTHZk26acIM16SY8TFiPOKpjBq+x1pzDLxvFHA4VFM8BFEaYhFgRLg\ncZg4i63qwpgkClLgNMdoSJSGwLjUmevnoOL5k9v+md855M28ZdkbQJq88eJTWb3iPE5Ydgynr/sE\ndfawo7GT777/aoYkZ4cWDFJiTJoMknHu2k9yT7GBhY0hzjr+FFb84gnUtMFiMweP4oAfPzHCccte\nyw7qzKfEEVe+h43vv45J6TkKhy9ZpBA0K8KKnLhHyzcyjJsqEme14zw6RIekMGeeBHM2kCZ4Ei8r\nCviYiredE6iVJVJjWDEBT0i4ZoOPUhBJnQhg63ExOIeJDuGh+6oxsISIjYKbt+slrkF3anj7F1Zx\ny0euQUT52k/+k9NfexJoECwj8OkNl/HJI8/psNvTjP6NHoNFcQqZQiEFVnPECKrBWUjEYHB4wtBD\nVEGYeGkfV1RSwMlZSBLLxMtKWyyntJ48MXVue0LIU8TpDmk5CbVm2TVEdW8FLA5CGN2W2quAWuLT\n8rMM6V6nncWw5QrVns3fOx7kRGb2iU/0HpwpCBEli/hfwcMztpNpOTftdZC0gHtWkWbDEy8rIQLR\n3qLT8uee2M9gEYQMaEafxnxyRPf2a4wJiYNJbusQJMpO2n/ahna87qtrHFyQOhsNHmMgx0TnnwzR\nsEbIdvxPNvlEO8xNQrk/0tNi6X2aUZxNPJtEmLbETPgdmk63pL2Ok/Hcl+az/+++Pn0q+woiZ2ml\n4uj8n45xTMmSBB7g9LRYGjNhnvd+2uLZGlqYOsTQeu+932tb4sBhJgJ6JPY/ekYspwphS9A66zu7\nX1PFs7VvKx1sOyXsPvZpZUBMJBKJ6dITYlmr1TDGtAVQVTHGtAXNe7/X+86yqjI2NsbGjRsB2pMG\no6OjbcHcVysztS4TicR06QmxhCBemzZtYmRkhM2bN7fFbGRkpC1+Tz/9NBs2bGBkZGRSV3tkZIS5\nc+dSqVTaLcqNGzeyaNEiRkZGGB0dBWjXd+bXTiQSienQEwtYK5UKmzZtAqCvr4/x8XGcczz44INt\n0Wy1GgEWLlzI+vXrsdZijMEY0xbBhx56iP7+fpYtW8b9999PqVRiyZIl7f/v7KInEonEdOmJlmW1\nWm0L2IIFC8iyjMcee6zdHR8ZGcFay+GHH46qsmPHDg499NC2UDrnMMagqhRFQbVaZevWrQwMDFAq\nldi4cWP7+FmWsXHjRh5//PFun3YikZhF9ETLsiViMDFxs2DBAqrVKosWLWJgYAAR4ZFHHmF4eJjx\n8XHq9TrLly9n69atZFmGc8FpuVwuMzg4yLZt21iwYAEiQrVapVQq8dRTT2GMYe7cuSxfvrybp5xI\nJGYZPdGyBDjssMNYunQp1lr6+vrYsmULIsLo6Ch79uwhz3OyLGPHjh1YaymXy2zfvr099ui9Z/Hi\nxSxfvpyxsTFe8YpX8LOf/YxSqcSuXbsYHBwkyzJUleHh4W6fbiKRmGX0hFiqKg888ADbt29nbGyM\n/v5+RIQ5c+a0BbJarXLQQQeRZRnVahXnHIsWLcJ7z8KFCymXyzz99NNs3bqVRYsWMTY2xvDwMI88\n8gilUolt27a1P6s1PppIJBLTpSfEsjXZUq1WaTabzJ8/v91CdM5Rq9X46U9/yqOPPsrSpUsZHx/n\nySefxFqLtZbR0VGazSZLlixhyZIlVCqV9hhmq37evHk0Go3kLpRIJF4QPRFI43Wve52uW7duL0fy\n1jikMYahoSGeeeaZtj9mi9YEj7VhCVx/fz9FUVCr1drbRYRGo8Fhhx3Gli1bOOKII2b4DBOJRK8y\nq6IOicguYDb2jRcB27ptxPMk2TxzzEa7Z6PN8OLsPkRVF/+8nXpiNhzYNB1l7zVE5K7ZZneyeeaY\njXbPRpthZuzuiTHLRCKR6HWSWCYSicQ06BWx/FK3DXiBzEa7k80zx2y0ezbaDDNgd09M8CQSiUSv\n0ysty0Qikehpui6WIvJbIrJJRDaLyPndtqeFiFwmIqMisr6jboGI3CQiD8bX+bFeROQf4zn8r4gc\n0yWbDxZzgNuIAAADqUlEQVSRW
0Vkg4j8RET+aJbYXRGRO0Tkvmj338T6w0Tk9mjfGhEpxfpyfL85\nbj+0G3ZHW6yI3CMiN8wimx8WkftF5F4RuSvW9fo1MiQi60Rko4iMiMibZtzmzsjiM/1HSKbyEPBK\nQsrp+4Aju2lTh21vBY4B1nfUfRY4P5bPBz4TyycB3yakcDkOuL1LNi8DjonlucADwJGzwG4BBmI5\nB26P9lwDnBbrvwh8JJY/Cnwxlk8D1nTxOjkPuAq4Ib6fDTY/DCyaUtfr18hXgHNiuQQMzbTNXfmx\nOr6ANwE3drxfDazupk1T7Dt0ilhuApbF8jKCfyjAxcD797Vfl+2/DnjXbLIb6AfuBn6N4GScTb1W\ngBuBN8VyFveTLtg6DNwMvAO4Id6cPW1z/Px9iWXPXiPAIPB/U7+vmba5293w5cCWjvePxbpeZamq\nPhHLTwJLY7nnziN2815PaKX1vN2xO3svMArcROhx7FTVYh+2te2O258BFs6sxQB8HvgEIW8v0YZe\ntxlC1uLviMiPReTcWNfL18hhwNPAl+OQx7+KyBxm2OZui+WsRcMjqyddCURkALgW+LiqjnVu61W7\nVdWp6tGE1tobgV/qsknPiYi8GxhV1R9325YXwFtU9RjgROBjIvLWzo09eI1khCGxL6jq64E9hG53\nm5mwudti+ThwcMf74VjXqzwlIssA4utorO+Z8xCRnCCUV6rq12N1z9vdQlV3ArcSurBDItJakttp\nW9vuuH0Q2D7Dpr4ZWCEiDwNfI3TF/4HethkAVX08vo4C/054OPXyNfIY8Jiq3h7fryOI54za3G2x\nvBN4VZxBLBEGvq/vsk3PxfXAWbF8FmFMsFV/ZpyFOw54pqN7MGOIiACXAiOq+rmOTb1u92IRGYrl\nPsI46whBNFfG3aba3TqflcAtsWUxY6jqalUdVtVDCdftLap6Bj1sM4CIzBGRua0y8JvAenr4GlHV\nJ4EtIvKaWPVOYMOM29yNAeYpg7QnEWZtHwI+2W17Ouy6GngCaBKebL9HGGO6GXgQ+C6wIO4rwL/E\nc7gfOLZLNr+F0BX5X+De+HfSLLD7KOCeaPd64K9j/SuBO4DNwFqgHOsr8f3muP2VXb5WfoOJ2fCe\ntjnad1/8+0nrnpsF18jRwF3xGvkGMH+mbU4reBKJRGIadLsbnkgkErOCJJaJRCIxDZJYJhKJxDRI\nYplIJBLTIIllIpFITIMklolEIjENklgmEonENEhimUgkEtPg/wFaPG/+ZxAINQAAAABJRU5ErkJg\ngg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "FcLMnSKYPcjA", - "colab_type": "text" - }, - "source": [ - "## Euclidian Distance Transform\n", - "This operation updates the pixel value with the euclidian distance from the foreground pixel to the background one.\n", - "* Note : It takes only binary image and results in transformed image. If a different image is given it results in a image with single value" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "-OMh6oeRQaYQ", - "colab_type": "code", - "outputId": "bba9e240-3427-41b8-f1ec-38f5afdc0356", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 240 - } - }, - "source": [ - "gray = tf.image.convert_image_dtype(xray_img, tf.float32)\n", - "gray = tf.image.rgb_to_grayscale(gray)\n", - "gray = tf.image.convert_image_dtype(gray, tf.uint8)\n", - "gray = tf.expand_dims(gray, 0)\n", - "eucid = tfa.image.euclidean_dist_transform(gray)\n", - "eucid = tf.squeeze(eucid, (0, -1))\n", - "_ = plt.imshow(eucid, cmap='gray')" - ], - "execution_count": 12, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAADfCAYAAAD4Bhh5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztfX+ofOld3vPOvfPj3r2hadQua3bp\nKl0pUWpsQ6LEP1LFEoN0ESQkFI26uP6RoEKgblKothJIQZOmWIJfSdCAmqQYyRJC0xgVEcwPE1NN\nso2uuiG7bLIqa9x75/fM2z9mnjPP+cx7Zs78unNm5vPAMDNnzpw5c+be5/28z+f5fN4QY4TD4XA4\njhe1fZ+Aw+FwOHYLJ3qHw+E4cjjROxwOx5HDid7hcDiOHE70DofDceRwonc4HI4jx86IPoTwyhDC\nF0MIj4cQHtnV5zgcDodjMcIufPQhhDMAfwHg+wA8CeBTAF4bY/zC1j/M4XA4HAuxq4j+pQAejzH+\ndYyxD+C9AB7c0Wc5HA6HYwHOd3TcFwL4sjx/EsDLdIcQwsMAHp4+/Tc7Og+Hw+E4ZvxdjPEblu20\nK6JfihjjHQB3ACCE4H0YHA6HY3V8qcxOu5JungJwnzy/d7rN4XA4HLeMXRH9pwA8EEL4phBCA8Br\nADy6o89yOBwOxwLsRLqJMQ5DCG8A8BEAZwDeHWP8/C4+y+FwOByLsRN75con4Rq9w+FwrINPxxhf\nsmwnr4x1OByOI4cTvcPhcBw5nOgdDofjyOFE73A4HEcOJ3qHw+E4cuytMtbh2CUajcatfE6/37+V\nz3E4NoETveOgkCLwWq2G8/P8n3IIAfV6fafnMhgMlg4oqYHABwfHbcOJ3nEQIKEWkfr5+TlCCLlt\nuyb6EAIW1aGkXhsOh3ODgxO/Y9dwondUEpbYSea1Wg1nZ2eo1WbpJRK8JdbBYLCQiBXn5+cYjUal\n9y9zvFqthmazufCcYow54h8OhxiPxwB8AHBsD070jsqh0WgUEryS/Hg8RowRo9EoSdKrEH2MEcPh\ncGtEH2PMzTCA9CxjPB7nBirOEjzyd2wTTvSOSkDJvV6v54gdmJDgeDxGr9fLnisxaySsKEuOJNVt\nkemyXEK9Xs8IPoSQDWDNZhPj8Tg3cCnx6/k58TvKwonesXeQ5Ov1ekb0JL7BYJCRXowRg8Ege58l\n902Ib9ukmTqeJWrC5hh4DYjxeIxarZb7rjbid9J3LIITvWNvIFE1Gg2EENBoNLIonqQ+GAxykbsl\ntEMiuKJzrdVqc7q9Rvsc+EajUW57SmY6pOvhuD040TtuHUURPOWZ6+vrOWI/NgJb9H00Ure5ihAC\nQgi46667sn3G4zHq9XoW5XOmc2zXzLE+nOgdtwoleY3gR6NRFrkPBoOTdp7od6bco1G/RvPn5+dZ\nHoPyTggh5/E/xWvoyMOJ3nFraLVaOYIHgNFohE6ngxjj0Ubvm6BI62dS2sperVYri+aZ12CU3+12\nb/v0HRWBE73jVtBoNDKpptFoYDQaZfo7I3gn+HKw14l2Uzp6zs7O0Gg0MikMmMwCTnmWdOpwonfs\nFCrVXFxcAAC63S6Gw2EWlTrxrA9eO1pPGeFfXFzg7OwsG1yHw2F27xH+6cGJ3rFTaNIVQM5JUwWC\nPxYdW8+fCdzRaJSRPe2aquP7LOp04ETv2AkYyV9cXKBer2MwGKDdblcmktTzGw6HACZkWVSN2mg0\n5l6rortFI3wtQKvX62i1Wlmilt+ZxF+l7+DYPpzoHTsBI3kWPd1mBF/UUdK6WehR5zlqdW7qfVrN\nCsx72df9frtodcBBq9/v5wqxeONzYNaF08n+eOFE79gqNFKu1Wq4ubm51SRgo9FAs9mc62Zpm4dp\nlMvoloVKrMqlxKGDAj3/ALKq3V142LdR9arvY0Uxv8P5+TkuLy8xHo+znEmtVqvEbMuxfTjRO7YK\nRvLAhGhuQxawBUa0GmqHS20eBsxHt7QihhCyPjskb26nZ53b+Hm852cUVfEWoWi/bbU4YHTPAQ2Y\nDXzsq0O4jHOccKJ3bA1qoaRtchefAcwTH7czItfIG5j49ZX4WahFOYYEzcfcVwcADgzNZjMjR+7H\noq9FmveqxM1CKQ5em5K9bbXQaDRygx3gMs6xwonesRWwGIok3+v1tkIWVjdXmYQgCTebzUxiOT8/\nz+yco9EI4/E41xSMBMoCI41qGfkPh8NcFExSbLVaAPIzlvF4jMFggH6/n5GqEiavD4BSA6Beu6ur\nq+xa8LzWibwpy/B47XY7+z60X/L8nOiPC070j
[... remainder of the base64-encoded PNG image payload of this notebook output cell omitted ...]\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - } - ] -} diff --git a/examples/layers_normalizations.ipynb b/examples/layers_normalizations.ipynb index 7eb91dccb5..253b4494bb 100644 --- a/examples/layers_normalizations.ipynb +++ b/examples/layers_normalizations.ipynb @@ -137,8 +137,8 @@ "outputId": "6e55e2de-663b-4ce4-fbe7-4e004594516e" }, "source": [ - "!pip install -q tensorflow==2.0.0rc0 \n", - "!pip install -q tensorflow-addons~=0.5\n", + "!pip install tensorflow==2.0.0-beta1 \n", + "!pip install tensorflow-addons\n", "from __future__ import absolute_import, division, print_function\n", "import tensorflow as tf\n", "import tensorflow_addons as tfa" diff --git a/examples/layers_weightnormalization.ipynb b/examples/layers_weightnormalization.ipynb index ce572e883d..904fbee4d0 100644 --- a/examples/layers_weightnormalization.ipynb +++ b/examples/layers_weightnormalization.ipynb @@ -119,8 +119,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow-gpu==2.0.0rc0\n", - "!pip install tensorflow-addons~=0.5\n", + "!pip install tensorflow-gpu==2.0.0-beta1\n", + "!pip install tensorflow-addons\n", "from __future__ import absolute_import, division, print_function, unicode_literals\n", "\n", "import tensorflow as tf\n", diff --git a/examples/losses_triplet.ipynb b/examples/losses_triplet.ipynb index 82f27ba18b..79a02d2500 100644 --- a/examples/losses_triplet.ipynb +++ b/examples/losses_triplet.ipynb @@ -124,8 +124,8 @@ "colab": {} }, "source": [ - "!pip install -q tensorflow-gpu==2.0.0rc0\n", - "!pip install -q tensorflow-addons~=0.5\n", + "!pip install tensorflow-gpu==2.0.0-beta1\n", + "!pip install tfa-nightly\n", "from __future__ import absolute_import, division, print_function, unicode_literals\n", "\n", "import io\n", @@ -378,4 +378,4 @@ "outputs": [] } ] -} +} \ No newline at end of file diff --git a/examples/optimizers_lazyadam.ipynb b/examples/optimizers_lazyadam.ipynb index d77e652ab9..037e811b63 100644 --- a/examples/optimizers_lazyadam.ipynb +++ b/examples/optimizers_lazyadam.ipynb @@ -120,8 +120,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow-gpu==2.0.0rc0\n", - "!pip install tensorflow-addons~=0.5\n", + "!pip install tensorflow-gpu==2.0.0-beta1\n", + "!pip install tensorflow-addons\n", "from __future__ import absolute_import, division, print_function, unicode_literals\n", "\n", "import tensorflow as tf\n", diff --git a/examples/template.ipynb b/examples/template.ipynb index 8e22d9d2c8..4030320fed 100644 --- a/examples/template.ipynb +++ b/examples/template.ipynb @@ -129,8 +129,8 @@ "colab": {} }, "source": [ - "!pip install tensorflow==2.0.0rc0\n", - "!pip install tensorflow-addons~=0.5" + "!pip install tensorflow==2.0.0.a0\n", + "!pip install tensorflow-addons" ], "execution_count": 0, "outputs": [] @@ -319,4 +319,4 @@ ] } ] -} +} \ No newline at end of file diff --git a/makefile b/makefile index b58f61c7d3..b4284e1025 100644 --- a/makefile +++ b/makefile @@ -16,6 +16,7 @@ all: code-format sanity-check unit-test +# TODO: install those dependencies in docker image (dockerfile). 
install-ci-dependency: bash tools/ci_build/install/install_ci_dependency.sh --quiet diff --git a/setup.py b/setup.py index 5d6d3cba40..ff056c7b5a 100644 --- a/setup.py +++ b/setup.py @@ -29,28 +29,15 @@ from __future__ import print_function import os -import platform import sys from datetime import datetime from setuptools import find_packages from setuptools import setup from setuptools.dist import Distribution -from setuptools import Extension DOCLINES = __doc__.split('\n') -TFA_NIGHTLY = 'tfa-nightly' -TFA_RELEASE = 'tensorflow-addons' - -if '--nightly' in sys.argv: - project_name = TFA_NIGHTLY - nightly_idx = sys.argv.index('--nightly') - sys.argv.pop(nightly_idx) -else: - project_name = TFA_RELEASE - -# Version version = {} base_dir = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(base_dir, "tensorflow_addons", "version.py")) as fp: @@ -58,26 +45,17 @@ exec(fp.read(), version) # yapf: enable -if project_name == TFA_NIGHTLY: - version['__version__'] += datetime.strftime(datetime.today(), "%Y%m%d") - -# Dependencies REQUIRED_PACKAGES = [ 'six >= 1.10.0', ] -if project_name == TFA_RELEASE: - # TODO: remove if-else condition when tf supports package consolidation. - if platform.system() == 'Linux': - REQUIRED_PACKAGES.append('tensorflow-gpu == 2.0.0-rc0') - else: - REQUIRED_PACKAGES.append('tensorflow == 2.0.0-rc0') -elif project_name == TFA_NIGHTLY: - # TODO: remove if-else condition when tf-nightly supports package consolidation. - if platform.system() == 'Linux': - REQUIRED_PACKAGES.append('tf-nightly-gpu-2.0-preview') - else: - REQUIRED_PACKAGES.append('tf-nightly-2.0-preview') +if '--nightly' in sys.argv: + project_name = 'tfa-nightly' + nightly_idx = sys.argv.index('--nightly') + sys.argv.pop(nightly_idx) + version['__version__'] += datetime.strftime(datetime.today(), "%Y%m%d") +else: + project_name = 'tensorflow-addons' class BinaryDistribution(Distribution): @@ -95,7 +73,6 @@ def has_ext_modules(self): author='Google Inc.', author_email='opensource@google.com', packages=find_packages(), - ext_modules=[Extension('_foo', ['stub.cc'])], install_requires=REQUIRED_PACKAGES, include_package_data=True, zip_safe=False, @@ -107,9 +84,9 @@ def has_ext_modules(self): 'Intended Audience :: Science/Research', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Libraries', diff --git a/tensorflow_addons/__init__.py b/tensorflow_addons/__init__.py index 76b48a8940..2a9f4df814 100644 --- a/tensorflow_addons/__init__.py +++ b/tensorflow_addons/__init__.py @@ -17,6 +17,59 @@ from __future__ import division from __future__ import print_function +# We need to put some imports inside a function call below, and the function +# call needs to come before the *actual* imports that populate the +# tensorflow_probability namespace. Hence, we disable this lint check throughout +# the file. +# + + +# Ensure TensorFlow is importable and its version is sufficiently recent. This +# needs to happen before anything else, since the imports below will try to +# import tensorflow, too. +def _ensure_tf_install(): + """Attempt to import tensorflow, and ensure its version is sufficient. 
+ + Raises: + ImportError: if either tensorflow is not importable or its version is + inadequate. + """ + try: + import tensorflow as tf + except ImportError: + # Print more informative error message, then reraise. + print("\n\nFailed to import TensorFlow. Please note that TensorFlow is" + " not installed by default when you install TensorFlow Addons." + " This is so that users can decide whether to install the" + " GPU-enabled TensorFlow package. To use TensorFlow Addons," + " please install the most recent version of TensorFlow, by" + " following instructions at https://tensorflow.org/install.\n\n") + raise + + import distutils.version + + # + # Update this whenever we need to depend on a newer TensorFlow release. + # + required_tensorflow_version = "2" + + if (distutils.version.LooseVersion(tf.__version__) < + distutils.version.LooseVersion(required_tensorflow_version)): + raise ImportError( + "This version of TensorFlow Addons requires TensorFlow " + "version >= {required}; Detected an installation of version " + "{present}. Please upgrade TensorFlow to proceed.".format( + required=required_tensorflow_version, present=tf.__version__)) + + +_ensure_tf_install() + +# Cleanup symbols to avoid polluting namespace. +del _ensure_tf_install +del absolute_import +del division +del print_function + # Local project imports from tensorflow_addons import activations from tensorflow_addons import callbacks @@ -30,8 +83,3 @@ from tensorflow_addons import text from tensorflow_addons.version import __version__ - -# Cleanup symbols to avoid polluting namespace. -del absolute_import -del division -del print_function diff --git a/tensorflow_addons/activations/BUILD b/tensorflow_addons/activations/BUILD index 34e87c6298..d454860322 100644 --- a/tensorflow_addons/activations/BUILD +++ b/tensorflow_addons/activations/BUILD @@ -6,14 +6,12 @@ py_library( name = "activations", srcs = [ "__init__.py", - "gelu.py", "sparsemax.py", ], - data = [ - "//tensorflow_addons/custom_ops/activations:_activation_ops.so", + srcs_version = "PY2AND3", + deps = [ "//tensorflow_addons/utils", ], - srcs_version = "PY2AND3", ) py_test( @@ -28,16 +26,3 @@ py_test( ":activations", ], ) - -py_test( - name = "gelu_test", - size = "large", - srcs = [ - "gelu_test.py", - ], - main = "gelu_test.py", - srcs_version = "PY2AND3", - deps = [ - ":activations", - ], -) diff --git a/tensorflow_addons/activations/README.md b/tensorflow_addons/activations/README.md index 500eee194b..4ab59b23bb 100644 --- a/tensorflow_addons/activations/README.md +++ b/tensorflow_addons/activations/README.md @@ -1,16 +1,14 @@ # Addons - Activations ## Maintainers -| Submodule | Maintainers | Contact Info | -|:----------|:--------------------------|:-----------------------------------------| -| gelu | @AakashKumarNain @WindQAQ | aakashnain@outlook.com windqaq@gmail.com | -| sparsemax | @AndreasMadsen | amwwebdk+github@gmail.com | +| Submodule | Maintainers | Contact Info | +|:---------- |:------------- |:--------------| +| sparsemax | @AndreasMadsen | amwwebdk+github@gmail.com | ## Contents -| Submodule | Activation | Reference | -|:----------|:-----------|:---------------------------------| -| gelu | gelu | https://arxiv.org/abs/1606.08415 | -| sparsemax | Sparsemax | https://arxiv.org/abs/1602.02068 | +| Submodule | Activation | Reference | +|:----------------------- |:-------------------|:---------------| +| sparsemax | Sparsemax | https://arxiv.org/abs/1602.02068 | ## Contribution Guidelines diff --git a/tensorflow_addons/activations/__init__.py 
b/tensorflow_addons/activations/__init__.py index 45903a3975..5792d00356 100644 --- a/tensorflow_addons/activations/__init__.py +++ b/tensorflow_addons/activations/__init__.py @@ -18,5 +18,4 @@ from __future__ import division from __future__ import print_function -from tensorflow_addons.activations.gelu import gelu from tensorflow_addons.activations.sparsemax import sparsemax diff --git a/tensorflow_addons/activations/gelu.py b/tensorflow_addons/activations/gelu.py deleted file mode 100644 index 539afbbe1c..0000000000 --- a/tensorflow_addons/activations/gelu.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf -from tensorflow_addons.utils import keras_utils -from tensorflow_addons.utils.resource_loader import get_path_to_datafile - -_activation_ops_so = tf.load_op_library( - get_path_to_datafile("custom_ops/activations/_activation_ops.so")) - - -@keras_utils.register_keras_custom_object -@tf.function -def gelu(x, approximate=True): - """Gaussian Error Linear Unit. - - Computes gaussian error linear: - `0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3)))` or - `x * P(X <= x) = 0.5 * x * (1 + erf(x / sqrt(2)))`, where P(X) ~ N(0, 1), - depending on whether approximation is enabled. - - See [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415) - and [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805). - - Args: - x: A `Tensor`. Must be one of the following types: - `float16`, `float32`, `float64`. - approximate: bool, whether to enable approximation. - Returns: - A `Tensor`. Has the same type as `x`. - """ - x = tf.convert_to_tensor(x) - return _activation_ops_so.gelu(x, approximate) - - -@tf.RegisterGradient("Gelu") -def _gelu_grad(op, grad): - return _activation_ops_so.gelu_grad(grad, op.inputs[0], - op.get_attr("approximate")) diff --git a/tensorflow_addons/activations/gelu_test.py b/tensorflow_addons/activations/gelu_test.py deleted file mode 100644 index f510715593..0000000000 --- a/tensorflow_addons/activations/gelu_test.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from absl.testing import parameterized - -import math - -import numpy as np -import tensorflow as tf -from tensorflow_addons.activations import gelu -from tensorflow_addons.utils import test_utils - - -def _ref_gelu(x, approximate=True): - x = tf.convert_to_tensor(x) - if approximate: - pi = tf.cast(math.pi, x.dtype) - coeff = tf.cast(0.044715, x.dtype) - return 0.5 * x * ( - 1.0 + tf.tanh(tf.sqrt(2.0 / pi) * (x + coeff * tf.pow(x, 3)))) - else: - return 0.5 * x * ( - 1.0 + tf.math.erf(x / tf.cast(tf.sqrt(2.0), x.dtype))) - - -@test_utils.run_all_in_graph_and_eager_modes -class GeluTest(tf.test.TestCase, parameterized.TestCase): - @parameterized.named_parameters(("float16", np.float16), - ("float32", np.float32), - ("float64", np.float64)) - def test_gelu(self, dtype): - x = np.random.rand(2, 3, 4).astype(dtype) - self.assertAllCloseAccordingToType(gelu(x), _ref_gelu(x)) - self.assertAllCloseAccordingToType(gelu(x, False), _ref_gelu(x, False)) - - @parameterized.named_parameters(("float16", np.float16), - ("float32", np.float32), - ("float64", np.float64)) - def test_gradients(self, dtype): - x = tf.constant([1.0, 2.0, 3.0], dtype=dtype) - - for approximate in [True, False]: - with self.subTest(approximate=approximate): - with tf.GradientTape(persistent=True) as tape: - tape.watch(x) - y_ref = _ref_gelu(x, approximate) - y = gelu(x, approximate) - grad_ref = tape.gradient(y_ref, x) - grad = tape.gradient(y, x) - self.assertAllCloseAccordingToType(grad, grad_ref) - - @parameterized.named_parameters(("float32", np.float32), - ("float64", np.float64)) - def test_theoretical_gradients(self, dtype): - # Only test theoretical gradients for float32 and float64 - # because of the instability of float16 while computing jacobian - x = tf.constant([1.0, 2.0, 3.0], dtype=dtype) - - for approximate in [True, False]: - with self.subTest(approximate=approximate): - theoretical, numerical = tf.test.compute_gradient( - lambda x: gelu(x, approximate=approximate), [x]) - self.assertAllCloseAccordingToType( - theoretical, numerical, atol=1e-4) - - def test_unknown_shape(self): - fn = gelu.get_concrete_function( - tf.TensorSpec(shape=None, dtype=tf.float32)) - - for shape in [(1,), (1, 2), (1, 2, 3), (1, 2, 3, 4)]: - x = tf.ones(shape=shape, dtype=tf.float32) - self.assertAllClose(fn(x), gelu(x)) - - def test_serialization(self): - ref_fn = gelu - config = tf.keras.activations.serialize(ref_fn) - fn = tf.keras.activations.deserialize(config) - self.assertEqual(fn, ref_fn) - - def test_serialization_with_layers(self): - layer = tf.keras.layers.Dense(3, activation=gelu) - config = tf.keras.layers.serialize(layer) - deserialized_layer = tf.keras.layers.deserialize(config) - self.assertEqual(deserialized_layer.__class__.__name__, - layer.__class__.__name__) - self.assertEqual(deserialized_layer.activation.__name__, "gelu") - - -if __name__ == "__main__": - tf.test.main() diff --git a/tensorflow_addons/activations/sparsemax.py b/tensorflow_addons/activations/sparsemax.py index a72a5d5ba0..e269cf0b60 100644 --- a/tensorflow_addons/activations/sparsemax.py +++ b/tensorflow_addons/activations/sparsemax.py @@ -22,8 +22,8 @@ from tensorflow_addons.utils import keras_utils -@keras_utils.register_keras_custom_object @tf.function +@keras_utils.register_keras_custom_object def sparsemax(logits, axis=-1, name=None): """Sparsemax 
activation function [1]. diff --git a/tensorflow_addons/activations/sparsemax_test.py b/tensorflow_addons/activations/sparsemax_test.py index 62e03f0184..3abe28fbb1 100644 --- a/tensorflow_addons/activations/sparsemax_test.py +++ b/tensorflow_addons/activations/sparsemax_test.py @@ -274,20 +274,6 @@ def test_gradient_against_estimate(self, dtype=None): lambda logits: sparsemax(logits), [z], delta=1e-6) self.assertAllCloseAccordingToType(jacob_sym, jacob_num) - def test_serialization(self, dtype=None): - ref_fn = sparsemax - config = tf.keras.activations.serialize(ref_fn) - fn = tf.keras.activations.deserialize(config) - self.assertEqual(fn, ref_fn) - - def test_serialization_with_layers(self, dtype=None): - layer = tf.keras.layers.Dense(3, activation=sparsemax) - config = tf.keras.layers.serialize(layer) - deserialized_layer = tf.keras.layers.deserialize(config) - self.assertEqual(deserialized_layer.__class__.__name__, - layer.__class__.__name__) - self.assertEqual(deserialized_layer.activation.__name__, "sparsemax") - if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/custom_ops/activations/BUILD b/tensorflow_addons/custom_ops/activations/BUILD deleted file mode 100644 index a199fbc689..0000000000 --- a/tensorflow_addons/custom_ops/activations/BUILD +++ /dev/null @@ -1,47 +0,0 @@ -licenses(["notice"]) # Apache 2.0 - -package(default_visibility = ["//visibility:public"]) - -load("@local_config_tf//:build_defs.bzl", "D_GLIBCXX_USE_CXX11_ABI") -load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda_is_configured", "if_cuda") - -cc_library( - name = "gelu_op_gpu", - srcs = [ - "cc/kernels/gelu_op.h", - "cc/kernels/gelu_op_gpu.cu.cc", - ], - copts = if_cuda_is_configured([ - "-DGOOGLE_CUDA=1", - "-x cuda", - "-nvcc_options=relaxed-constexpr", - "-nvcc_options=ftz=true", - ]), - deps = [ - "@local_config_tf//:libtensorflow_framework", - "@local_config_tf//:tf_header_lib", - ] + if_cuda_is_configured([ - "@local_config_cuda//cuda:cuda_libs", - "@local_config_cuda//cuda:cuda_headers", - ]), - alwayslink = 1, -) - -cc_binary( - name = "_activation_ops.so", - srcs = [ - "cc/kernels/gelu_op.cc", - "cc/kernels/gelu_op.h", - "cc/ops/gelu_op.cc", - ], - copts = [ - "-pthread", - "-std=c++11", - D_GLIBCXX_USE_CXX11_ABI, - ] + if_cuda(["-DGOOGLE_CUDA=1"]), - linkshared = 1, - deps = [ - "@local_config_tf//:libtensorflow_framework", - "@local_config_tf//:tf_header_lib", - ] + if_cuda_is_configured([":gelu_op_gpu"]), -) diff --git a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc deleted file mode 100644 index a48cd652ac..0000000000 --- a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.cc +++ /dev/null @@ -1,77 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -#define EIGEN_USE_THREADS - -#include "tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h" -#include "tensorflow/core/framework/op_kernel.h" -#include "tensorflow/core/framework/register_types.h" -#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" - -namespace tensorflow { - -using CPUDevice = Eigen::ThreadPoolDevice; - -#define REGISTER_GELU_KERNELS(type) \ - REGISTER_KERNEL_BUILDER( \ - Name("Gelu").Device(DEVICE_CPU).TypeConstraint("T"), \ - GeluOp); \ - REGISTER_KERNEL_BUILDER( \ - Name("GeluGrad").Device(DEVICE_CPU).TypeConstraint("T"), \ - GeluGradOp); - -// Gelu only makes sense with floating points. -TF_CALL_GPU_NUMBER_TYPES(REGISTER_GELU_KERNELS); -#undef REGISTER_GELU_KERNELS - -#if GOOGLE_CUDA - -using GPUDevice = Eigen::GpuDevice; - -// Forward declarations of the functor specializations for GPU. -namespace functor { -#define DECLARE_GPU_SPEC(T) \ - template <> \ - void Gelu::operator()( \ - const GPUDevice& d, typename TTypes::ConstTensor features, \ - bool approximate, typename TTypes::Tensor activations); \ - extern template struct Gelu; \ - \ - template <> \ - void GeluGrad::operator()( \ - const GPUDevice& d, typename TTypes::ConstTensor gradients, \ - typename TTypes::ConstTensor features, bool approximate, \ - typename TTypes::Tensor backprops); \ - extern template struct GeluGrad; - -TF_CALL_GPU_NUMBER_TYPES(DECLARE_GPU_SPEC); -#undef DECLARE_GPU_SPEC -} // namespace functor - -// Registration of the GPU implementations. -#define REGISTER_GELU_GPU_KERNELS(type) \ - REGISTER_KERNEL_BUILDER( \ - Name("Gelu").Device(DEVICE_GPU).TypeConstraint("T"), \ - GeluOp); \ - REGISTER_KERNEL_BUILDER( \ - Name("GeluGrad").Device(DEVICE_GPU).TypeConstraint("T"), \ - GeluGradOp); - -TF_CALL_GPU_NUMBER_TYPES(REGISTER_GELU_GPU_KERNELS); -#undef REGISTER_GELU_GPU_KERNELS - -#endif // GOOGLE_CUDA - -} // namespace tensorflow diff --git a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h deleted file mode 100644 index a0469f3571..0000000000 --- a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h +++ /dev/null @@ -1,144 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#ifndef TENSORFLOW_ADDONS_GELU_OP_H_ -#define TENSORFLOW_ADDONS_GELU_OP_H_ - -#define EIGEN_USE_THREADS - -#include "tensorflow/core/framework/numeric_op.h" -#include "tensorflow/core/framework/op_kernel.h" -#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" - -namespace tensorflow { -namespace functor { - -// Functor used by GeluOp to do the computations. -template -struct Gelu { - // Computes Gelu activation. - // - // features: any shape. - // approximate: whether to enable approximation. - // activations: same shape as "features". 
- void operator()(const Device& d, typename TTypes::ConstTensor features, - bool approximate, typename TTypes::Tensor activations) { - if (approximate) { - // y = 0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3))) - activations.device(d) = - static_cast(0.5) * features * - (static_cast(1) + - (static_cast(M_2_SQRTPI * M_SQRT1_2) * - (features + static_cast(0.044715) * features.cube())) - .tanh()); - } else { - // y = x * normcdf(x) = 0.5 * x * (1 + erf(x / sqrt(2))) - activations.device(d) = - static_cast(0.5) * features * - (static_cast(1) + (features * static_cast(M_SQRT1_2)).erf()); - } - } -}; - -// Functor used by GeluGradOp to do the computations. -template -struct GeluGrad { - // Computes GeluGrad backprops. - // - // gradients: gradients backpropagated to the Gelu op. - // features: the inputs that were passed to the Gelu op. - // approximate: whether to enable approximation. - // backprops: gradients to backpropagate to the Gelu inputs. - void operator()(const Device& d, typename TTypes::ConstTensor gradients, - typename TTypes::ConstTensor features, bool approximate, - typename TTypes::Tensor backprops) { - if (approximate) { - const T kAlpha = static_cast(M_2_SQRTPI * M_SQRT1_2); - const T kBeta = kAlpha * static_cast(0.044715) * static_cast(3); - const auto y = - (kAlpha * ((static_cast(0.044715) * features.cube()) + features)) - .tanh(); - backprops.device(d) = ((-features * y.square() + features) * - (kBeta * features.square() + kAlpha) + - static_cast(1) + y) * - gradients * static_cast(0.5); - } else { - backprops.device(d) = - gradients * (static_cast(M_2_SQRTPI * M_SQRT1_2 * 0.5) * features * - (-features.square() * static_cast(0.5)).exp() + - (static_cast(0.5) * - (static_cast(1) + - (features * static_cast(M_SQRT1_2)).erf()))); - } - } -}; - -} // namespace functor - -template -class GeluOp : public UnaryElementWiseOp> { - public: - explicit GeluOp(OpKernelConstruction* context) - : UnaryElementWiseOp>::UnaryElementWiseOp(context) { - OP_REQUIRES_OK(context, context->GetAttr("approximate", &approximate_)); - } - - void Operate(OpKernelContext* context, const Tensor& input, Tensor* output) { - functor::Gelu functor; - functor(context->eigen_device(), input.flat(), approximate_, - output->flat()); - } - - private: - bool approximate_; -}; - -template -class GeluGradOp : public BinaryElementWiseOp> { - public: - explicit GeluGradOp(OpKernelConstruction* context) - : BinaryElementWiseOp>::BinaryElementWiseOp( - context) { - OP_REQUIRES_OK(context, context->GetAttr("approximate", &approximate_)); - } - - void OperateNoTemplate(OpKernelContext* context, const Tensor& g, - const Tensor& a, bool approximate, Tensor* output); - - template - void Operate(OpKernelContext* context, const Tensor& g, const Tensor& a, - Tensor* output) { - OperateNoTemplate(context, g, a, approximate_, output); - } - - private: - bool approximate_; -}; - -template -void GeluGradOp::OperateNoTemplate(OpKernelContext* context, - const Tensor& g, const Tensor& a, - bool approximate, - Tensor* output) { - functor::GeluGrad functor; - functor(context->eigen_device(), g.flat(), a.flat(), - approximate, output->flat()); -} - -} // namespace tensorflow - -#undef EIGEN_USE_THREADS - -#endif // TENSORFLOW_ADDONS_GELU_OP_H_ diff --git a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc b/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc deleted file mode 100644 index 37d21e66e0..0000000000 --- a/tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op_gpu.cu.cc 
+++ /dev/null @@ -1,36 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#if GOOGLE_CUDA - -#define EIGEN_USE_GPU - -#include "tensorflow_addons/custom_ops/activations/cc/kernels/gelu_op.h" -#include "tensorflow/core/framework/register_types.h" -#include "third_party/eigen3/Eigen/Core" - -namespace tensorflow { - -using GPUDevice = Eigen::GpuDevice; - -#define DEFINE_GPU_KERNELS(T) \ - template struct functor::Gelu; \ - template struct functor::GeluGrad; - -TF_CALL_GPU_NUMBER_TYPES(DEFINE_GPU_KERNELS); - -} // namespace tensorflow - -#endif // GOOGLE_CUDA diff --git a/tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc b/tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc deleted file mode 100644 index 03406894b8..0000000000 --- a/tensorflow_addons/custom_ops/activations/cc/ops/gelu_op.cc +++ /dev/null @@ -1,37 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -#include "tensorflow/core/framework/common_shape_fns.h" -#include "tensorflow/core/framework/op.h" -#include "tensorflow/core/framework/shape_inference.h" - -namespace tensorflow { - -REGISTER_OP("Gelu") - .Input("features: T") - .Output("activations: T") - .Attr("T: {half, float, double}") - .Attr("approximate: bool = true") - .SetShapeFn(shape_inference::UnchangedShape); - -REGISTER_OP("GeluGrad") - .Input("gradients: T") - .Input("features: T") - .Output("backprops: T") - .Attr("T: {half, float, double}") - .Attr("approximate: bool = true") - .SetShapeFn(shape_inference::MergeBothInputsShapeFn); - -} // namespace tensorflow diff --git a/tensorflow_addons/custom_ops/image/BUILD b/tensorflow_addons/custom_ops/image/BUILD index a0fdbc4da5..1f4236dc9f 100644 --- a/tensorflow_addons/custom_ops/image/BUILD +++ b/tensorflow_addons/custom_ops/image/BUILD @@ -68,33 +68,9 @@ cc_library( alwayslink = 1, ) -cc_library( - name = "euclidean_distance_transform_op_gpu", - srcs = [ - "cc/kernels/euclidean_distance_transform_op.h", - "cc/kernels/euclidean_distance_transform_op_gpu.cu.cc", - ], - copts = if_cuda_is_configured([ - "-DGOOGLE_CUDA=1", - "-x cuda", - "-nvcc_options=relaxed-constexpr", - "-nvcc_options=ftz=true", - ]), - deps = [ - "@local_config_tf//:libtensorflow_framework", - "@local_config_tf//:tf_header_lib", - ] + if_cuda_is_configured([ - "@local_config_cuda//cuda:cuda_libs", - "@local_config_cuda//cuda:cuda_headers", - ]), - alwayslink = 1, -) - cc_binary( name = "_image_ops.so", srcs = [ - "cc/kernels/connected_components.cc", - "cc/kernels/connected_components.h", "cc/kernels/euclidean_distance_transform_op.cc", "cc/kernels/euclidean_distance_transform_op.h", "cc/kernels/image_projective_transform_op.cc", @@ -110,8 +86,5 @@ cc_binary( deps = [ "@local_config_tf//:libtensorflow_framework", "@local_config_tf//:tf_header_lib", - ] + if_cuda_is_configured([ - ":image_projective_transform_op_gpu", - ":euclidean_distance_transform_op_gpu", - ]), + ] + if_cuda_is_configured([":image_projective_transform_op_gpu"]), ) diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc b/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc deleted file mode 100644 index 1dbe83fe2a..0000000000 --- a/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.cc +++ /dev/null @@ -1,138 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ -// See docs for ImageConnectedComponents in ../ops/image_ops.cc, and description -// of the algorithm in connected_components.h. 
- -#define EIGEN_USE_THREADS - -#include "tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h" -#include "tensorflow/core/framework/op_kernel.h" -#include "tensorflow/core/framework/register_types.h" -#include "tensorflow/core/framework/types.h" -#include "tensorflow/core/platform/types.h" - -namespace tensorflow { - -using tensorflow::functor::BlockedImageUnionFindFunctor; -using tensorflow::functor::FindRootFunctor; -using tensorflow::functor::ImageConnectedComponentsFunctor; -using tensorflow::functor::TensorRangeFunctor; - -using OutputType = typename BlockedImageUnionFindFunctor::OutputType; - -// Computes connected components on batches of 2D images. -template -class ImageConnectedComponents : public OpKernel { - public: - explicit ImageConnectedComponents(OpKernelConstruction* ctx) - : OpKernel(ctx) {} - - void Compute(OpKernelContext* ctx) override { - const Tensor& images_t = ctx->input(0); - OP_REQUIRES(ctx, images_t.shape().dims() == 3, - errors::InvalidArgument("Input images must have rank 3")); - Tensor forest_t, rank_t; - OP_REQUIRES_OK(ctx, ctx->allocate_temp(tensorflow::DT_INT64, - images_t.shape(), &forest_t)); - OP_REQUIRES_OK(ctx, ctx->allocate_temp(tensorflow::DT_INT64, - images_t.shape(), &rank_t)); - Tensor* output_t; - OP_REQUIRES_OK(ctx, ctx->allocate_output(0, images_t.shape(), &output_t)); - - // Fill forest with values from 0 to n - 1, so that each node points to - // itself. - TensorRangeFunctor()(ctx->eigen_device(), - forest_t.flat()); - auto rank = rank_t.tensor(); - rank.device(ctx->eigen_device()) = rank.constant(OutputType(0)); - - const auto images = images_t.tensor(); - auto forest = forest_t.tensor(); - ImageConnectedComponentsFunctor()( - ctx, output_t->flat(), images, forest, rank); - } -}; - -using CPUDevice = Eigen::ThreadPoolDevice; - -namespace functor { - -// Connected components CPU implementation. See `connected_components.h` for a -// description of the algorithm. -template -struct ImageConnectedComponentsFunctor { - void operator()(OpKernelContext* ctx, - typename TTypes::Flat output, - typename TTypes::ConstTensor images, - typename TTypes::Tensor forest, - typename TTypes::Tensor rank) { - const int64 num_images = images.dimension(0), - num_rows = images.dimension(1), num_cols = images.dimension(2), - num_elements = images.size(); - // Bail out early for an empty image--no work to do. - if (num_elements == 0) { - return; - } - auto worker_threads = ctx->device()->tensorflow_cpu_worker_threads(); - BlockedImageUnionFindFunctor union_find( - images.data(), num_rows, num_cols, forest.data(), rank.data()); - while (union_find.can_merge()) { - union_find.merge_blocks(); - int64 num_blocks_vertically = union_find.num_blocks_vertically(); - int64 num_blocks_horizontally = union_find.num_blocks_horizontally(); - // Merging each block calls union_down for each pixel in a row of the - // block, and union_right for each pixel in a column of the block. Assume - // 20 instructions for each call to union_down or union_right. find() may - // loop more while searching for the root, but this should not be very - // significant. 
- int cost = (union_find.block_height() + union_find.block_width()) * 20; - Shard(worker_threads->num_threads, worker_threads->workers, - num_images * num_blocks_vertically * num_blocks_horizontally, cost, - [&union_find, num_blocks_vertically, num_blocks_horizontally]( - int64 start_block, int64 limit_block) { - for (int64 i = start_block; i < limit_block; i++) { - int64 block_x = i % num_blocks_horizontally; - int64 block_y = - (i / num_blocks_horizontally) % num_blocks_vertically; - int64 image = - i / (num_blocks_horizontally * num_blocks_vertically); - union_find.merge_internal_block_edges(image, block_y, block_x); - } - }); - } - FindRootFunctor()(ctx->eigen_device(), output, - images.data(), union_find); - } -}; - -} // end namespace functor - -#define REGISTER_IMAGE_CONNECTED_COMPONENTS(TYPE) \ - REGISTER_KERNEL_BUILDER(Name("ImageConnectedComponents") \ - .Device(DEVICE_CPU) \ - .TypeConstraint("dtype"), \ - ImageConnectedComponents) -// Connected components (arguably) make sense for number, bool, and string types -TF_CALL_NUMBER_TYPES(REGISTER_IMAGE_CONNECTED_COMPONENTS); -TF_CALL_bool(REGISTER_IMAGE_CONNECTED_COMPONENTS); -TF_CALL_string(REGISTER_IMAGE_CONNECTED_COMPONENTS); -#undef REGISTER_IMAGE_CONNECTED_COMPONENTS - -// TODO(ringwalt): Implement on GPU. We probably want to stick to the original -// algorithm by Stava and Benes there for efficiency (computing small blocks in -// shared memory in CUDA thread blocks, instead of starting with single-pixel -// blocks). - -} // end namespace tensorflow \ No newline at end of file diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h b/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h deleted file mode 100644 index 7d645641bc..0000000000 --- a/tensorflow_addons/custom_ops/image/cc/kernels/connected_components.h +++ /dev/null @@ -1,305 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ -// See docs for ImageConnectedComponents in ../ops/image_ops.cc, and description -// of the algorithm in connected_components.h. - -#ifndef TENSORFLOW_ADDONS_CONNECTED_COMPONENTS_H_ -#define TENSORFLOW_ADDONS_CONNECTED_COMPONENTS_H_ - -// Connected component analysis. The op is described in ../ops/image_ops.cc. A -// description of the algorithm appears below. - -#define EIGEN_USE_THREADS - -#include "tensorflow/core/framework/op_kernel.h" -#include "tensorflow/core/framework/tensor_types.h" -#include "tensorflow/core/platform/types.h" -#include "tensorflow/core/util/work_sharder.h" -#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" - -namespace tensorflow { - -namespace functor { - -template -bool is_nonzero(T value) { - return value != T(0); -} - -template <> -bool is_nonzero(string value) { - return value.size() != 0; -} - -// Processes each pixel of an image for union-find, in parallel blocks. 
This is -// loosely based on the algorithm in "GPU Computing Gems" by Ondrej Stava and -// Bedrich Benes, available here: -// http://hpcg.purdue.edu/bbenes/papers/Stava2011CCL.pdf -// The bulk of the process uses blocks of each image, which have each been -// processed separately. As long as there are multiple blocks in the image, we -// double the height and width of the blocks, creating new blocks which each -// consist of 2x2 previous sub-blocks. On each new block, we process adjacent -// pixels from the previous sub-blocks serially. However, the new blocks are not -// connected, so we can process each block in parallel. -// The GPU algorithm first processes blocks of a fixed size in GPU shared -// memory, with one image block per CUDA thread block. On the CPU, we just start -// with a block size of a single pixel, and borrow the rest of the algorithm -// unchanged. -template -class BlockedImageUnionFindFunctor { - public: - using OutputType = int64; - - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE BlockedImageUnionFindFunctor( - const T* images, const int64 num_rows, const int64 num_cols, - OutputType* forest, OutputType* rank) - : images_(images), - num_rows_(num_rows), - num_cols_(num_cols), - block_height_(1), - block_width_(1), - forest_(forest), - rank_(rank) {} - - // Returns the root of the tree that the pixel at the given index belongs to. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE OutputType - find(OutputType index) const { - while (forest_[index] != index) { - index = forest_[index]; - } - return index; - } - - // Returns the number of blocks along the y axis. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 num_blocks_vertically() const { - return (num_rows_ + block_height_ - 1) / block_height_; - } - - // Returns the number of blocks along the x axis. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 num_blocks_horizontally() const { - return (num_cols_ + block_width_ - 1) / block_width_; - } - - // Returns the total number of blocks in each image. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 num_blocks() const { - return num_blocks_vertically() * num_blocks_horizontally(); - } - - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 block_height() const { - return block_height_; - } - - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE int64 block_width() const { - return block_width_; - } - - // Returns whether we may merge again (the image contains more than one - // block). - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE bool can_merge() const { - return block_height_ < num_rows_ || block_width_ < num_cols_; - } - - // Doubles the block size. After this method, you must call - // `merge_internal_block_edges` for each image and each *new* block's xy - // coordinates (typically in parallel). - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void merge_blocks() { - block_height_ *= 2; - block_width_ *= 2; - } - - // Processes pairs of pixels within the block which were adjacent in the four - // sub-blocks. This must be done at each stage so that the connected - // components in each block are joined correctly. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void merge_internal_block_edges( - int64 image_index, int64 block_vertical_index, - int64 block_horizontal_index) const { - int64 block_start_y = block_vertical_index * block_height_; - int64 block_start_x = block_horizontal_index * block_width_; - // Merge the 4 sub-blocks horizontally (fixing the vertical seam). 
- int64 block_center_x = block_start_x + block_width_ / 2 - 1; - if (0 <= block_center_x && block_center_x + 1 < num_cols_) { - int64 merge_blocks_limit_y = - std::min(num_rows_, block_start_y + block_height_); - for (int64 y = block_start_y; y < merge_blocks_limit_y; y++) { - union_right(image_index, y, block_center_x); - } - } - // Merge the 4 sub-blocks vertically (fixing the horizontal seam). - int64 block_center_y = block_start_y + block_height_ / 2 - 1; - if (0 <= block_center_y && block_center_y + 1 < num_rows_) { - int64 merge_blocks_limit_x = - std::min(num_cols_, block_start_x + block_width_); - for (int64 x = block_start_x; x < merge_blocks_limit_x; x++) { - union_down(image_index, block_center_y, x); - } - } - } - - private: - // The input image(s). - const T* const images_; - const int64 num_rows_; - const int64 num_cols_; - // Current height of each sub-block of the image. - int64 block_height_; - // Current width of each sub-block of the image. - int64 block_width_; - // Union-find forest. This has the same size as `images_`, and each entry - // holds the index of its parent in `images_` (roots hold their own index). - // Cycles should not occur. - OutputType* const forest_; - // Union-find rank of each pixel. - OutputType* const rank_; - - // Unions the pixel with the pixel below it if applicable (both pixels are - // true, and the pixel is not in the last row). - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void union_down(OutputType batch, - OutputType row, - OutputType col) const { - T pixel = read_pixel(batch, row, col); - if (is_nonzero(pixel)) { - const int64 index_a = col + num_cols_ * (row + num_rows_ * batch); - if (row + 1 < num_rows_ && read_pixel(batch, row + 1, col) == pixel) { - const int64 index_b = col + num_cols_ * (row + 1 + num_rows_ * batch); - do_union(index_a, index_b); - } - } - } - - // Unions the pixel with the pixel to the right of it if applicable. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void union_right(OutputType batch, - OutputType row, - OutputType col) const { - T pixel = read_pixel(batch, row, col); - if (is_nonzero(pixel)) { - const int64 index_a = col + num_cols_ * (row + num_rows_ * batch); - if (col + 1 < num_cols_ && read_pixel(batch, row, col + 1) == pixel) { - const int64 index_b = col + 1 + num_cols_ * (row + num_rows_ * batch); - do_union(index_a, index_b); - } - } - } - - // Reads a pixel value in the images. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE T - read_pixel(const OutputType batch, const OutputType row, - const OutputType col) const { - return images_[col + num_cols_ * (row + num_rows_ * batch)]; - } - - // Unions the trees that the two pixels belong to, using their index in the - // `images_` array. - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void do_union( - OutputType index_a, OutputType index_b) const { - // Find the roots of index_a and index_b in the forest, and make one the - // child of the other. - index_a = find(index_a); - index_b = find(index_b); - const OutputType rank_a = rank_[index_a]; - const OutputType rank_b = rank_[index_b]; - OutputType parent, child; - if (index_a == index_b) { - return; - } else if (rank_a < rank_b) { - parent = index_a; - child = index_b; - } else { - parent = index_b; - child = index_a; - rank_[parent]++; - } - forest_[child] = parent; - } -}; - -// Runs the ImageUnionFindFunctor on all pixels. Will require different CPU and -// GPU implementations. 
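For reference, the deleted header above builds on a plain union-find (disjoint-set) structure with union by rank and no path compression: `find` just walks parent pointers, and `do_union` attaches one root to the other while tracking ranks. Below is a minimal Python sketch of that structure, for illustration only; the names and the exact rank bookkeeping follow the textbook version rather than being a line-by-line translation of the kernel.

```python
# Textbook union-find with union by rank and no path compression,
# illustrating the forest_/rank_ arrays used by BlockedImageUnionFindFunctor.
# Illustrative sketch only, not the kernel's implementation.
class UnionFind(object):
    def __init__(self, n):
        self.parent = list(range(n))  # every node starts as its own root
        self.rank = [0] * n

    def find(self, i):
        # Walk parent pointers to the root; no path compression, like the kernel.
        while self.parent[i] != i:
            i = self.parent[i]
        return i

    def union(self, a, b):
        root_a, root_b = self.find(a), self.find(b)
        if root_a == root_b:
            return
        # Attach the shallower tree under the deeper one to bound tree height.
        if self.rank[root_a] < self.rank[root_b]:
            root_a, root_b = root_b, root_a
        self.parent[root_b] = root_a
        if self.rank[root_a] == self.rank[root_b]:
            self.rank[root_a] += 1
```

Union by rank keeps trees roughly O(log n) deep, which is where the `O(n log n)` bound quoted in the op documentation comes from; the blocked merge schedule only changes which unions can run in parallel, not the asymptotics.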
-template -class ImageConnectedComponentsFunctor { - public: - using OutputType = typename BlockedImageUnionFindFunctor::OutputType; - - void operator()(OpKernelContext* ctx, - typename TTypes::ConstTensor images, - typename TTypes::Tensor forest, - typename TTypes::Tensor rank); -}; - -// Fills a flat Tensor with indices from 0 to n - 1. -template -class TensorRangeFunctor { - public: - using OutputType = typename BlockedImageUnionFindFunctor::OutputType; - - void operator()(const Device& device, - typename TTypes::Flat tensor) { - tensor.device(device) = tensor.generate(TensorRangeGenerator()); - } - - private: - class TensorRangeGenerator { - public: - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE OutputType - operator()(const Eigen::array& coords) const { - return coords[0]; - } - }; -}; - -// Given the union-find forest, generates the root index for each node. This -// gives us arbitrary, usually non-consecutive ids for each connected component. -// The ids are massaged in Python to get deterministic, consecutive ids. -template -class FindRootFunctor { - public: - using OutputType = typename BlockedImageUnionFindFunctor::OutputType; - - void operator()(const Device& device, - typename TTypes::Flat component_ids, - const T* images, - const BlockedImageUnionFindFunctor& union_find) { - component_ids.device(device) = - component_ids.generate(FindRootGenerator(images, union_find)); - } - - private: - class FindRootGenerator { - const T* const images_; - const BlockedImageUnionFindFunctor union_find_; - - public: - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE FindRootGenerator( - const T* images, BlockedImageUnionFindFunctor union_find) - : images_(images), union_find_(union_find) {} - - EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE OutputType - operator()(const Eigen::array& coords) const { - if (is_nonzero(images_[coords[0]])) { - // True pixels have an arbitrary segment id > 0. The segment ids will be - // made contiguous later. - return union_find_.find(coords[0]) + 1; - } else { - // False pixels have a segment of 0. - return 0; - } - } - }; -}; - -} // end namespace functor - -} // namespace tensorflow - -#endif // TENSORFLOW_ADDONS_CONNECTED_COMPONENTS_H_ diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc index 5c6928b35f..8b5923edeb 100644 --- a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc +++ b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.cc @@ -77,7 +77,8 @@ TF_CALL_double(REGISTER); #undef REGISTER -#if GOOGLE_CUDA +// TODO: fix compile issue #349 of the gpu kernel. 
+#if 0 && GOOGLE_CUDA typedef Eigen::GpuDevice GPUDevice; @@ -86,7 +87,7 @@ namespace functor { #define DECLARE_FUNCTOR(TYPE) \ template <> \ void EuclideanDistanceTransformFunctor::operator()( \ - const GPUDevice &device, OutputType *output, const InputType &images) \ + const GPUDevice &device, OutputType *output, const InputType *images) \ const; \ extern template struct EuclideanDistanceTransformFunctor @@ -100,6 +101,7 @@ TF_CALL_double(DECLARE_FUNCTOR); REGISTER_KERNEL_BUILDER(Name("EuclideanDistanceTransform") \ .Device(DEVICE_GPU) \ .TypeConstraint("dtype"), \ + .HostMemory("output_shape"), \ EuclideanDistanceTransform) TF_CALL_half(REGISTER); diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h index 740c239a27..65940fd099 100644 --- a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h +++ b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h @@ -30,8 +30,6 @@ namespace generator { using Eigen::array; using Eigen::DenseIndex; -using Eigen::numext::sqrt; -using Eigen::numext::mini; template class EuclideanDistanceTransformGenerator { @@ -43,8 +41,8 @@ class EuclideanDistanceTransformGenerator { EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE EuclideanDistanceTransformGenerator( typename TTypes::ConstTensor input) : input_(input) { - height_ = input_.dimension(1); - width_ = input_.dimension(2); + height_ = input_.dimensions()[1]; + width_ = input_.dimensions()[2]; } EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE T @@ -54,17 +52,17 @@ class EuclideanDistanceTransformGenerator { if (input_(coords) == T(0)) return T(0); - T minDistance = Eigen::NumTraits::highest(); + float minDistance = static_cast(std::numeric_limits::max()); for (int h = 0; h < height_; ++h) { for (int w = 0; w < width_; ++w) { if (input_({coords[0], h, w, coords[3]}) == T(0)) { - T dist = sqrt(T((x - h) * (x - h) + (y - w) * (y - w))); - minDistance = mini(minDistance, dist); + float dist = std::sqrt((x - h) * (x - h) + (y - w) * (y - w)); + minDistance = std::min(minDistance, dist); } } } - return minDistance; + return T(minDistance); } }; diff --git a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc b/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc deleted file mode 100644 index 47e0b45194..0000000000 --- a/tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op_gpu.cu.cc +++ /dev/null @@ -1,40 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
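The rewritten generator in `euclidean_distance_transform_op.h` above computes, for every foreground pixel, the distance to the nearest zero pixel by scanning the whole image, accumulating the intermediate distance in `float` regardless of the tensor dtype. A rough NumPy equivalent of that per-pixel brute force is sketched below for a single 2-D channel; it is illustrative only and ignores batching, channels, and the all-foreground edge case the kernel has to handle.

```python
import numpy as np

def brute_force_edt(image):
    """Euclidean distance transform of a 2-D array: zeros are background,
    every non-zero pixel gets the distance to its nearest zero pixel.
    O((H*W)^2) sketch mirroring the generator's double loop, not the real op."""
    height, width = image.shape
    background = np.argwhere(image == 0)          # coordinates of zero pixels
    out = np.zeros((height, width), dtype=np.float32)
    for x in range(height):
        for y in range(width):
            if image[x, y] == 0:
                continue                          # background stays at distance 0
            d2 = (background[:, 0] - x) ** 2 + (background[:, 1] - y) ** 2
            out[x, y] = np.sqrt(d2.min())
    return out
```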
-==============================================================================*/ - -#if GOOGLE_CUDA - -#define EIGEN_USE_GPU - -#include "tensorflow_addons/custom_ops/image/cc/kernels/euclidean_distance_transform_op.h" -#include "tensorflow/core/framework/register_types.h" -#include "tensorflow/core/framework/types.h" -#include "tensorflow/core/platform/types.h" - -namespace tensorflow { - -namespace functor { - -// Explicit instantiation of the GPU functor. -typedef Eigen::GpuDevice GPUDevice; - -template struct EuclideanDistanceTransformFunctor; -template struct EuclideanDistanceTransformFunctor; -template struct EuclideanDistanceTransformFunctor; - -} // end namespace functor - -} // end namespace tensorflow - -#endif // GOOGLE_CUDA diff --git a/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc b/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc index 35aa4295f1..0477dfb29f 100644 --- a/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc +++ b/tensorflow_addons/custom_ops/image/cc/ops/image_ops.cc @@ -101,23 +101,6 @@ transformed_images: 4D `Tensor`, image(s) in NHWC format, generated by applying the `transforms` to the `images`. Satisfies the description above. )doc"; -static const char ImageConnectedComponentsDoc[] = R"doc( -Find the connected components of image(s). -For each image (along the 0th axis), all connected components of adjacent pixels -with the same non-zero value are detected and given unique ids. -The returned `components` tensor has 0s for the zero pixels of `images`, and -arbitrary nonzero ids for the connected components of nonzero values. Ids are -unique across all of the images, and are in row-major order by the first pixel -in the component. -Uses union-find with union by rank but not path compression, giving a runtime of -`O(n log n)`. See: - https://en.wikipedia.org/wiki/Disjoint-set_data_structure#Time_Complexity -image: Image(s) with shape (N, H, W). -components: Component ids for each pixel in "image". Same shape as "image". Zero - pixels all have an output of 0, and all components of adjacent pixels with - the same value are given consecutive ids, starting from 1. 
-)doc"; - } // namespace REGISTER_OP("EuclideanDistanceTransform") @@ -137,16 +120,4 @@ REGISTER_OP("ImageProjectiveTransformV2") .Output("transformed_images: dtype") .SetShapeFn(ResizeShapeFn) .Doc(kImageProjectiveTransformDoc); - -REGISTER_OP("ImageConnectedComponents") - .Input("image: dtype") - .Output("components: int64") - .Attr( - "dtype: {int64, int32, uint16, int16, uint8, int8, half, float, " - "double, bool, string}") - .SetShapeFn([](InferenceContext *c) { - return shape_inference::UnchangedShape(c); - }) - .Doc(ImageConnectedComponentsDoc); - -} // namespace tensorflow \ No newline at end of file +} // namespace tensorflow diff --git a/tensorflow_addons/custom_ops/layers/BUILD b/tensorflow_addons/custom_ops/layers/BUILD index bb9c00ddf5..ed0c567f59 100644 --- a/tensorflow_addons/custom_ops/layers/BUILD +++ b/tensorflow_addons/custom_ops/layers/BUILD @@ -10,13 +10,14 @@ cc_binary( srcs = [ "cc/kernels/correlation_cost_op.cc", "cc/kernels/correlation_cost_op.h", + "cc/kernels/correlation_cost_op_gpu.cu.cc", "cc/ops/correlation_cost_op.cc", ], copts = [ "-pthread", "-std=c++11", D_GLIBCXX_USE_CXX11_ABI, - ] + if_cuda(["-DGOOGLE_CUDA=1"]), + ], linkshared = 1, deps = [ "@local_config_tf//:libtensorflow_framework", diff --git a/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc b/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc index 1c3580e9e1..e1f4b1cdbc 100644 --- a/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc +++ b/tensorflow_addons/custom_ops/layers/cc/kernels/correlation_cost_op.cc @@ -331,7 +331,7 @@ TF_CALL_float(REGISTER_CORRELATIONCOST_OP_CPU); #undef REGISTER_CORRELATIONCOST_OP_CPU // Register the GPU kernels. -#if GOOGLE_CUDA +#ifdef GOOGLE_CUDA #define REGISTER_CORRELATIONCOST_OP_GPU(T) \ REGISTER_KERNEL_BUILDER( \ diff --git a/tensorflow_addons/image/BUILD b/tensorflow_addons/image/BUILD index 27b42df54d..79f5b1fcd5 100644 --- a/tensorflow_addons/image/BUILD +++ b/tensorflow_addons/image/BUILD @@ -15,7 +15,6 @@ py_library( "utils.py", "sparse_image_warp.py", "interpolate_spline.py", - "connected_components.py", ]), data = [ ":sparse_image_warp_test_data", @@ -83,6 +82,7 @@ py_test( ], ) +# TODO: use cuda_test later. 
py_test( name = "transform_ops_test", size = "medium", @@ -147,16 +147,3 @@ py_test( ":image", ], ) - -py_test( - name = "connected_components_test", - size = "medium", - srcs = [ - "connected_components_test.py", - ], - main = "connected_components_test.py", - srcs_version = "PY2AND3", - deps = [ - ":image", - ], -) diff --git a/tensorflow_addons/image/README.md b/tensorflow_addons/image/README.md index 6742c14792..57f6fede11 100644 --- a/tensorflow_addons/image/README.md +++ b/tensorflow_addons/image/README.md @@ -3,7 +3,6 @@ ## Maintainers | Submodule | Maintainers | Contact Info | |:---------- |:----------- |:--------------| -| connected_components | @sayoojbk | sayoojbk@gmail.com | | dense_image_warp | @WindQAQ | windqaq@gmail.com | | distance_transform_ops | @mels630 | mels630@gmail.com | | distort_image_ops | @WindQAQ | windqaq@gmail.com | @@ -14,7 +13,6 @@ ## Components | Submodule | Image Processing Function | Reference | |:---------- |:----------- |:----------- | -| connected_components | connected_components | | | dense_image_warp | dense_image_warp | | | dense_image_warp | interpolate_bilinear | | | distance_transform_ops | euclidean_distance_transform | | @@ -30,7 +28,6 @@ | translate_ops | translate | | | translate_ops | translations_to_projective_transforms | | - ## Contribution Guidelines #### Standard API In order to conform with the current API standard, all image ops diff --git a/tensorflow_addons/image/__init__.py b/tensorflow_addons/image/__init__.py index 21a8b77c35..d0d886735d 100644 --- a/tensorflow_addons/image/__init__.py +++ b/tensorflow_addons/image/__init__.py @@ -29,4 +29,3 @@ from tensorflow_addons.image.sparse_image_warp import sparse_image_warp from tensorflow_addons.image.interpolate_spline import interpolate_spline from tensorflow_addons.image.translate_ops import translate -from tensorflow_addons.image.connected_components import connected_components diff --git a/tensorflow_addons/image/connected_components.py b/tensorflow_addons/image/connected_components.py deleted file mode 100644 index 921b8841fe..0000000000 --- a/tensorflow_addons/image/connected_components.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Connected Components.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf - -from tensorflow_addons.utils.resource_loader import get_path_to_datafile - -_image_ops_so = tf.load_op_library( - get_path_to_datafile("custom_ops/image/_image_ops.so")) - - -@tf.function -def connected_components(images, name=None): - """Labels the connected components in a batch of images. - - A component is a set of pixels in a single input image, which are - all adjacent and all have the same non-zero value. 
The components - using a squared connectivity of one (all True entries are joined with - their neighbors above,below, left, and right). Components across all - images have consecutive ids 1 through n. - Components are labeled according to the first pixel of the - component appearing in row-major order (lexicographic order by - image_index_in_batch, row, col). - Zero entries all have an output id of 0. - This op is equivalent with `scipy.ndimage.measurements.label` - on a 2D array with the default structuring element - (which is the connectivity used here). - Args: - images: A 2D (H, W) or 3D (N, H, W) Tensor of boolean image(s). - name: The name of the op. - Returns: - Components with the same shape as `images`. - False entries in `images` have value 0, and - all True entries map to a component id > 0. - Raises: - TypeError: if `images` is not 2D or 3D. - """ - with tf.name_scope(name or "connected_components"): - image_or_images = tf.convert_to_tensor(images, name="images") - if len(image_or_images.get_shape()) == 2: - images = image_or_images[None, :, :] - elif len(image_or_images.get_shape()) == 3: - images = image_or_images - else: - raise TypeError( - "images should have rank 2 (HW) or 3 (NHW). Static shape is %s" - % image_or_images.get_shape()) - components = _image_ops_so.image_connected_components(images) - - # TODO(ringwalt): Component id renaming should be done in the op, - # to avoid constructing multiple additional large tensors. - components_flat = tf.reshape(components, [-1]) - unique_ids, id_index = tf.unique(components_flat) - id_is_zero = tf.where(tf.equal(unique_ids, 0))[:, 0] - # Map each nonzero id to consecutive values. - nonzero_consecutive_ids = tf.range( - tf.shape(unique_ids)[0] - tf.shape(id_is_zero)[0]) + 1 - - def no_zero(): - # No need to insert a zero into the ids. - return nonzero_consecutive_ids - - def has_zero(): - # Insert a zero in the consecutive ids - # where zero appears in unique_ids. - # id_is_zero has length 1. - zero_id_ind = tf.cast(id_is_zero[0], tf.int32) - ids_before = nonzero_consecutive_ids[:zero_id_ind] - ids_after = nonzero_consecutive_ids[zero_id_ind:] - return tf.concat([ids_before, [0], ids_after], axis=0) - - new_ids = tf.cond( - tf.equal(tf.shape(id_is_zero)[0], 0), no_zero, has_zero) - components = tf.reshape( - tf.gather(new_ids, id_index), tf.shape(components)) - if len(image_or_images.get_shape()) == 2: - return components[0, :, :] - else: - return components diff --git a/tensorflow_addons/image/connected_components_test.py b/tensorflow_addons/image/connected_components_test.py deleted file mode 100644 index 97d6b8b29b..0000000000 --- a/tensorflow_addons/image/connected_components_test.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
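The deleted Python wrapper above spends most of its body renaming the op's arbitrary component ids to consecutive ids while keeping background at 0 (the `tf.unique` plus `no_zero`/`has_zero` branches). The same renaming step is easier to see in NumPy; the sketch below is illustrative only, and unlike `tf.unique`, `np.unique` sorts the ids rather than keeping first-appearance order, so the exact numbering can differ even though the consecutive-id property is the same.

```python
import numpy as np

def relabel_consecutive(components):
    """Map arbitrary non-negative component ids to consecutive ids 1..n,
    keeping background (0) at 0. Illustrative sketch of the renaming step."""
    flat = components.ravel()
    unique_ids, inverse = np.unique(flat, return_inverse=True)
    if unique_ids[0] == 0:
        # 0 stays 0; the remaining ids become 1, 2, ..., n.
        new_ids = np.concatenate([[0], np.arange(1, unique_ids.size)])
    else:
        new_ids = np.arange(1, unique_ids.size + 1)
    return new_ids[inverse].reshape(components.shape)
```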
-# ============================================================================== -"""Tests for connected component analysis.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import logging -import tensorflow as tf -import numpy as np - -from tensorflow_addons.image.connected_components import connected_components -from tensorflow_addons.utils import test_utils - -# Image for testing connected_components, with a single, winding component. -SNAKE = np.asarray([[0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 0, 0, 0, 0], - [0, 0, 0, 0, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 1, 0], - [0, 1, 1, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0], - [0, 1, 0, 1, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 1, 0], - [0, 1, 1, 1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, - 0]]) # pylint: disable - - -@test_utils.run_all_in_graph_and_eager_modes -class ConnectedComponentsTest(tf.test.TestCase): - def testDisconnected(self): - arr = tf.cast( - [[1, 0, 0, 1, 0, 0, 0, 0, 1], [0, 1, 0, 0, 0, 1, 0, 1, 0], - [1, 0, 1, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], - [0, 0, 1, 0, 0, 0, 0, 0, 0]], tf.bool) # pylint: disable - expected = ([[1, 0, 0, 2, 0, 0, 0, 0, 3], [0, 4, 0, 0, 0, 5, 0, 6, 0], - [7, 0, 8, 0, 0, 0, 9, 0, 0], [0, 0, 0, 0, 10, 0, 0, 0, 0], - [0, 0, 11, 0, 0, 0, 0, 0, 0]]) # pylint: disable - self.assertAllEqual(self.evaluate(connected_components(arr)), expected) - - def testSimple(self): - arr = [[0, 1, 0], [1, 1, 1], [0, 1, 0]] - - # Single component with id 1. - self.assertAllEqual( - self.evaluate(connected_components(tf.cast(arr, tf.bool))), arr) - - def testSnake(self): - - # Single component with id 1. - self.assertAllEqual( - self.evaluate(connected_components(tf.cast(SNAKE, tf.bool))), - SNAKE) - - def testSnake_disconnected(self): - for i in range(SNAKE.shape[0]): - for j in range(SNAKE.shape[1]): - - # If we disconnect any part of the snake except for the endpoints, - # there will be 2 components. - if SNAKE[i, j] and (i, j) not in [(1, 1), (6, 3)]: - disconnected_snake = SNAKE.copy() - disconnected_snake[i, j] = 0 - components = self.evaluate( - connected_components( - tf.cast(disconnected_snake, tf.bool))) - self.assertEqual(components.max(), 2, - 'disconnect (%d, %d)' % (i, j)) - bins = np.bincount(components.ravel()) - # Nonzero number of pixels labeled 0, 1, or 2. 
- self.assertGreater(bins[0], 0) - self.assertGreater(bins[1], 0) - self.assertGreater(bins[2], 0) - - def testMultipleImages(self): - images = [[[1, 1, 1, 1], [1, 0, 0, 1], [1, 0, 0, 1], [1, 1, 1, 1]], - [[1, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 1]], - [[1, 1, 0, 1], [0, 1, 1, 0], [1, 0, 1, 0], - [0, 0, 1, 1]]] # pylint: disable - expected = [[[1, 1, 1, 1], [1, 0, 0, 1], [1, 0, 0, 1], [1, 1, 1, 1]], - [[2, 0, 0, 3], [0, 0, 0, 0], [0, 0, 0, 0], [4, 0, 0, 5]], - [[6, 6, 0, 7], [0, 6, 6, 0], [8, 0, 6, 0], - [0, 0, 6, 6]]] # pylint: disable - - self.assertAllEqual( - self.evaluate(connected_components(tf.cast(images, tf.bool))), - expected) - - def testZeros(self): - - self.assertAllEqual( - connected_components( - self.evaluate(tf.zeros((100, 20, 50), tf.bool))), - np.zeros((100, 20, 50))) - - def testOnes(self): - - self.assertAllEqual( - self.evaluate( - connected_components(tf.ones((100, 20, 50), tf.bool))), - np.tile(np.arange(100)[:, None, None] + 1, [1, 20, 50])) - - def testOnes_small(self): - - self.assertAllEqual( - self.evaluate(connected_components(tf.ones((3, 5), tf.bool))), - np.ones((3, 5))) - - def testRandom_scipy(self): - np.random.seed(42) - images = np.random.randint(0, 2, size=(10, 100, 200)).astype(np.bool) - expected = connected_components_reference_implementation(images) - if expected is None: - return - - self.assertAllEqual( - self.evaluate(connected_components(images)), expected) - - -def connected_components_reference_implementation(images): - try: - # pylint disable=g-import-not-at-top - from scipy.ndimage import measurements - except ImportError: - logging.exception( - 'Skipping test method because scipy could not be loaded') - return - image_or_images = np.asarray(images) - if len(image_or_images.shape) == 2: - images = image_or_images[None, :, :] - elif len(image_or_images.shape) == 3: - images = image_or_images - components = np.asarray([measurements.label(image)[0] for image in images]) - # Get the count of nonzero ids for each image, and offset each image's nonzero - # ids using the cumulative sum. - num_ids_per_image = components.reshape( - [-1, components.shape[1] * components.shape[2]]).max(axis=-1) - positive_id_start_per_image = np.cumsum(num_ids_per_image) - for i in range(components.shape[0]): - new_id_start = positive_id_start_per_image[i - 1] if i > 0 else 0 - components[i, components[i] > 0] += new_id_start - if len(image_or_images.shape) == 2: - return components[0, :, :] - else: - return components - - -if __name__ == '__main__': - tf.test.main() diff --git a/tensorflow_addons/image/interpolate_spline_test.py b/tensorflow_addons/image/interpolate_spline_test.py index 33dcf0fc26..106edf8beb 100644 --- a/tensorflow_addons/image/interpolate_spline_test.py +++ b/tensorflow_addons/image/interpolate_spline_test.py @@ -21,8 +21,9 @@ from scipy import interpolate as sc_interpolate import tensorflow as tf -from tensorflow_addons.image import interpolate_spline +import tensorflow.compat.v1 as tf1 # TODO: locate placeholder from tensorflow_addons.utils import test_utils +from tensorflow_addons.image import interpolate_spline class _InterpolationProblem(object): @@ -33,14 +34,14 @@ def get_problem(self, optimizable=False, extrapolate=True, """Make data for an interpolation problem where all x vectors are n-d. Args: - optimizable: If True, then make train_points a tf.Variable. - extrapolate: If False, then clamp the query_points values to be - within the max and min of train_points. - dtype: The data type to use. 
+ optimizable: If True, then make train_points a tf.Variable. + extrapolate: If False, then clamp the query_points values to be within + the max and min of train_points. + dtype: The data type to use. Returns: - query_points, query_values, train_points, train_values: training - and test tensors for interpolation problem. + query_points, query_values, train_points, train_values: training and + test tensors for interpolation problem """ # The values generated here depend on a seed of 0. @@ -56,7 +57,7 @@ def get_problem(self, optimizable=False, extrapolate=True, init_points = init_points.astype(dtype) train_points = (tf.Variable(init_points) if optimizable else tf.constant(init_points)) - train_values = self.test_function(train_points) + train_values = self.tf_function(train_points) query_points_np = np.random.uniform( size=[batch_size, num_query_points, self.DATA_DIM]) @@ -66,7 +67,7 @@ def get_problem(self, optimizable=False, extrapolate=True, np.max(init_points)) query_points = tf.constant(query_points_np) - query_values = self.test_function(query_points_np) + query_values = self.np_function(query_points_np) return query_points, query_values, train_points, train_values @@ -98,8 +99,16 @@ class _QuadraticPlusSinProblem1D(_InterpolationProblem): ] } - def test_function(self, x): - """Takes a tensor, evaluates the test function, and returns a + def np_function(self, x): + """Takes np array, evaluates the test function, and returns np + array.""" + return np.sum( + np.power((x - 0.5), 3) - 0.25 * x + 10 * np.sin(x * 10), + axis=2, + keepdims=True) + + def tf_function(self, x): + """Takes tf tensor, evaluates the test function, and returns tf tensor.""" return tf.reduce_mean( tf.pow((x - 0.5), 3) - 0.25 * x + 10 * tf.sin(x * 10), @@ -135,8 +144,16 @@ class _QuadraticPlusSinProblemND(_InterpolationProblem): ], } - def test_function(self, x): - """Takes a tensor, evaluates the test function, and returns a + def np_function(self, x): + """Takes np array, evaluates the test function, and returns np + array.""" + return np.sum( + np.square(x - 0.5) + 0.25 * x + 1 * np.sin(x * 15), + axis=2, + keepdims=True) + + def tf_function(self, x): + """Takes tf tensor, evaluates the test function, and returns tf tensor.""" return tf.reduce_sum( tf.square(x - 0.5) + 0.25 * x + 1 * tf.sin(x * 15), @@ -144,7 +161,6 @@ def test_function(self, x): keepdims=True) -@test_utils.run_all_in_graph_and_eager_modes class InterpolateSplineTest(tf.test.TestCase): def test_1d_linear_interpolation(self): """For 1d linear interpolation, we can compare directly to scipy.""" @@ -155,35 +171,41 @@ def test_1d_linear_interpolation(self): interpolation_order = 1 with tf.name_scope('interpolator'): - interp = self.evaluate( - interpolate_spline(train_points, train_values, query_points, - interpolation_order)) - - query_points, train_points, train_values, = self.evaluate( - [query_points, train_points, train_values]) - - # Just look at the first element of the minibatch. - # Also, trim the final singleton dimension. - interp = interp[0, :, 0] - query_points = query_points[0, :, 0] - train_points = train_points[0, :, 0] - train_values = train_values[0, :, 0] - - # Compute scipy interpolation. 
- scipy_interp_function = sc_interpolate.interp1d( - train_points, train_values, kind='linear') - - scipy_interpolation = scipy_interp_function(query_points) - scipy_interpolation_on_train = scipy_interp_function(train_points) - - # Even with float64 precision, the interpolants disagree with scipy a - # bit due to the fact that we add the EPSILON to prevent sqrt(0), etc. - tol = 1e-3 - - self.assertAllClose( - train_values, scipy_interpolation_on_train, atol=tol, rtol=tol) - self.assertAllClose( - interp, scipy_interpolation, atol=tol, rtol=tol) + interpolator = interpolate_spline( + train_points, train_values, query_points, interpolation_order) + with self.cached_session() as sess: + fetches = [ + query_points, train_points, train_values, interpolator + ] + query_points_, train_points_, train_values_, interp_ = sess.run( # pylint: disable=C0301 + fetches) + + # Just look at the first element of the minibatch. + # Also, trim the final singleton dimension. + interp_ = interp_[0, :, 0] + query_points_ = query_points_[0, :, 0] + train_points_ = train_points_[0, :, 0] + train_values_ = train_values_[0, :, 0] + + # Compute scipy interpolation. + scipy_interp_function = sc_interpolate.interp1d( + train_points_, train_values_, kind='linear') + + scipy_interpolation = scipy_interp_function(query_points_) + scipy_interpolation_on_train = scipy_interp_function( + train_points_) + + # Even with float64 precision, the interpolants disagree with scipy a + # bit due to the fact that we add the EPSILON to prevent sqrt(0), etc. + tol = 1e-3 + + self.assertAllClose( + train_values_, + scipy_interpolation_on_train, + atol=tol, + rtol=tol) + self.assertAllClose( + interp_, scipy_interpolation, atol=tol, rtol=tol) def test_1d_interpolation(self): """Regression test for interpolation with 1-D points.""" @@ -194,15 +216,17 @@ def test_1d_interpolation(self): for order in (1, 2, 3): for reg_weight in (0, 0.01): - interp = self.evaluate( - interpolate_spline(train_points, train_values, - query_points, order, reg_weight)) + interpolator = interpolate_spline(train_points, train_values, + query_points, order, + reg_weight) target_interpolation = tp.HARDCODED_QUERY_VALUES[(order, reg_weight)] target_interpolation = np.array(target_interpolation) - - self.assertAllClose(interp[0, :, 0], target_interpolation) + with self.cached_session() as sess: + interp_val = sess.run(interpolator) + self.assertAllClose(interp_val[0, :, 0], + target_interpolation) def test_nd_linear_interpolation(self): """Regression test for interpolation with N-D points.""" @@ -213,16 +237,19 @@ def test_nd_linear_interpolation(self): for order in (1, 2, 3): for reg_weight in (0, 0.01): - interp = self.evaluate( - interpolate_spline(train_points, train_values, - query_points, order, reg_weight)) + interpolator = interpolate_spline(train_points, train_values, + query_points, order, + reg_weight) target_interpolation = tp.HARDCODED_QUERY_VALUES[(order, reg_weight)] target_interpolation = np.array(target_interpolation) + with self.cached_session() as sess: + interp_val = sess.run(interpolator) + self.assertAllClose(interp_val[0, :, 0], + target_interpolation) - self.assertAllClose(interp[0, :, 0], target_interpolation) - + @test_utils.run_deprecated_v1 def test_nd_linear_interpolation_unspecified_shape(self): """Ensure that interpolation supports dynamic batch_size and num_points.""" @@ -230,64 +257,72 @@ def test_nd_linear_interpolation_unspecified_shape(self): (query_points, _, train_points, train_values) = tp.get_problem(dtype='float64') + # 
Construct placeholders such that the batch size, number of train points, + # and number of query points are not known at graph construction time. feature_dim = query_points.shape[-1] value_dim = train_values.shape[-1] + train_points_ph = tf1.placeholder( + dtype=train_points.dtype, shape=[None, None, feature_dim]) + train_values_ph = tf1.placeholder( + dtype=train_values.dtype, shape=[None, None, value_dim]) + query_points_ph = tf1.placeholder( + dtype=query_points.dtype, shape=[None, None, feature_dim]) order = 1 reg_weight = 0.01 - # Get concrete functions such that the batch size, number of train points, - # and number of query points are not known at graph construction time. - fn = tf.function(interpolate_spline).get_concrete_function( - tf.TensorSpec( - shape=[None, None, feature_dim], dtype=train_points.dtype), - tf.TensorSpec( - shape=[None, None, value_dim], dtype=train_values.dtype), - tf.TensorSpec( - shape=[None, None, feature_dim], dtype=query_points.dtype), - order, reg_weight) + interpolator = interpolate_spline(train_points_ph, train_values_ph, + query_points_ph, order, reg_weight) target_interpolation = tp.HARDCODED_QUERY_VALUES[(order, reg_weight)] target_interpolation = np.array(target_interpolation) + with self.cached_session() as sess: - interp_val = self.evaluate( - fn(train_points, train_values, query_points)) + (train_points_value, train_values_value, + query_points_value) = sess.run( + [train_points, train_values, query_points]) - self.assertAllClose(interp_val[0, :, 0], target_interpolation) + interp_val = sess.run( + interpolator, + feed_dict={ + train_points_ph: train_points_value, + train_values_ph: train_values_value, + query_points_ph: query_points_value + }) + self.assertAllClose(interp_val[0, :, 0], target_interpolation) def test_fully_unspecified_shape(self): """Ensure that erreor is thrown when input/output dim unspecified.""" + self.skipTest("TODO: port to tf2.0 / eager") tp = _QuadraticPlusSinProblemND() (query_points, _, train_points, train_values) = tp.get_problem(dtype='float64') + # Construct placeholders such that the batch size, number of train points, + # and number of query points are not known at graph construction time. feature_dim = query_points.shape[-1] value_dim = train_values.shape[-1] + train_points_ph = tf1.placeholder( + dtype=train_points.dtype, shape=[None, None, feature_dim]) + train_points_ph_invalid = tf1.placeholder( + dtype=train_points.dtype, shape=[None, None, None]) + train_values_ph = tf1.placeholder( + dtype=train_values.dtype, shape=[None, None, value_dim]) + train_values_ph_invalid = tf1.placeholder( + dtype=train_values.dtype, shape=[None, None, None]) + query_points_ph = tf1.placeholder( + dtype=query_points.dtype, shape=[None, None, feature_dim]) order = 1 reg_weight = 0.01 - # Get concrete functions such that the batch size, number of train points, - # and number of query points are not known at graph construction time. 
with self.assertRaises(ValueError): - fn = tf.function(interpolate_spline).get_concrete_function( - tf.TensorSpec( - shape=[None, None, None], dtype=train_points.dtype), - tf.TensorSpec( - shape=[None, None, value_dim], dtype=train_values.dtype), - tf.TensorSpec( - shape=[None, None, feature_dim], dtype=query_points.dtype), - order, reg_weight) + _ = interpolate_spline(train_points_ph_invalid, train_values_ph, + query_points_ph, order, reg_weight) with self.assertRaises(ValueError): - fn = tf.function(interpolate_spline).get_concrete_function( - tf.TensorSpec( - shape=[None, None, feature_dim], dtype=train_points.dtype), - tf.TensorSpec( - shape=[None, None, None], dtype=train_values.dtype), - tf.TensorSpec( - shape=[None, None, feature_dim], dtype=query_points.dtype), - order, reg_weight) + _ = interpolate_spline(train_points_ph, train_values_ph_invalid, + query_points_ph, order, reg_weight) def test_interpolation_gradient(self): """Make sure that backprop can run. Correctness of gradients is @@ -307,21 +342,22 @@ def test_interpolation_gradient(self): regularization = 0.001 for interpolation_order in (1, 2, 3, 4): - - def loss_fn(): - interpolator = interpolate_spline( - train_points, train_values, query_points, - interpolation_order, regularization) - loss = tf.reduce_mean(tf.square(query_values - interpolator)) - return loss - - optimizer = tf.keras.optimizers.SGD( - learning_rate=0.001, momentum=0.9, clipnorm=1.0) - opt_op = optimizer.minimize(loss_fn, [train_points]) - - self.evaluate(tf.compat.v1.global_variables_initializer()) - for _ in range(100): - self.evaluate(opt_op) + optimizer = tf1.train.MomentumOptimizer(0.001, 0.9) + + @tf.function + def train_step(): + with tf.GradientTape() as gt: + interpolator = interpolate_spline( + train_points, train_values, query_points, + interpolation_order, regularization) + loss = tf.reduce_mean( + tf.square(query_values - interpolator)) + grad = gt.gradient(loss, [train_points]) + grad, _ = tf.clip_by_global_norm(grad, 1.0) + opt_func = optimizer.apply_gradients(zip(grad, [train_points])) + + for epoch in range(100): + train_step() if __name__ == '__main__': diff --git a/tensorflow_addons/image/sparse_image_warp_test.py b/tensorflow_addons/image/sparse_image_warp_test.py index 7fdf2aad6e..4c2659c7d5 100644 --- a/tensorflow_addons/image/sparse_image_warp_test.py +++ b/tensorflow_addons/image/sparse_image_warp_test.py @@ -19,14 +19,13 @@ import numpy as np import tensorflow as tf -from tensorflow_addons.image import sparse_image_warp +import tensorflow.compat.v1 as tf1 # TODO: port TF1 test files? 
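Several of the rewritten tests above share the same eager training-step shape: compute the loss under a `tf.GradientTape`, clip the gradients by global norm, then apply them with an optimizer. A self-contained sketch of that pattern on a toy quadratic loss follows; the Keras SGD-with-momentum optimizer here is a stand-in chosen for the sketch, not the `tf1.train.MomentumOptimizer` the tests construct.

```python
import tensorflow as tf

var = tf.Variable([2.0, -3.0])
optimizer = tf.keras.optimizers.SGD(learning_rate=0.001, momentum=0.9)

@tf.function
def train_step():
    with tf.GradientTape() as tape:
        loss = tf.reduce_mean(tf.square(var - 1.0))   # toy stand-in for the spline loss
    grads = tape.gradient(loss, [var])
    grads, _ = tf.clip_by_global_norm(grads, 1.0)     # same clipping as the tests
    optimizer.apply_gradients(zip(grads, [var]))
    return loss

for _ in range(100):
    train_step()
```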
from tensorflow_addons.image.sparse_image_warp import _get_boundary_locations from tensorflow_addons.image.sparse_image_warp import _get_grid_locations -from tensorflow_addons.utils import test_utils +from tensorflow_addons.image import sparse_image_warp from tensorflow_addons.utils.resource_loader import get_path_to_datafile -@test_utils.run_all_in_graph_and_eager_modes class SparseImageWarpTest(tf.test.TestCase): def setUp(self): np.random.seed(0) @@ -83,7 +82,7 @@ def assertZeroShift(self, order, regularization, num_boundary_points): image = np.random.uniform( size=[batch_size, image_height, image_width, channels]) - input_image = tf.constant(np.float32(image)) + input_image_op = tf.constant(np.float32(image)) control_point_locations = [[1., 1.], [2., 2.], [2., 1.]] control_point_locations = tf.constant( @@ -94,16 +93,19 @@ def assertZeroShift(self, order, regularization, num_boundary_points): control_point_displacements = tf.constant( np.float32(control_point_displacements)) - (warped_image, flow) = sparse_image_warp( - input_image, + (warped_image_op, flow_field) = sparse_image_warp( + input_image_op, control_point_locations, control_point_locations + control_point_displacements, interpolation_order=order, regularization_weight=regularization, num_boundary_points=num_boundary_points) - warped_image, input_image = self.evaluate([warped_image, input_image]) - self.assertAllClose(warped_image, input_image) + with self.cached_session() as sess: + warped_image, input_image, _ = sess.run( + [warped_image_op, input_image_op, flow_field]) + + self.assertAllClose(warped_image, input_image) def testMoveSinglePixel(self): """Run assertMoveSinglePixel for various hyperparameters and data @@ -123,7 +125,7 @@ def assertMoveSinglePixel(self, order, num_boundary_points, type_to_use): image = np.zeros([batch_size, image_height, image_width, channels]) image[:, 3, 3, :] = 1.0 - input_image = tf.constant(image, dtype=type_to_use) + input_image_op = tf.constant(image, dtype=type_to_use) # Place a control point at the one white pixel. control_point_locations = [[3., 3.]] @@ -136,110 +138,116 @@ def assertMoveSinglePixel(self, order, num_boundary_points, type_to_use): np.float32(np.expand_dims(control_point_displacements, 0)), dtype=type_to_use) - (warped_image, flow) = sparse_image_warp( - input_image, + (warped_image_op, flow_field) = sparse_image_warp( + input_image_op, control_point_locations, control_point_locations + control_point_displacements, interpolation_order=order, num_boundary_points=num_boundary_points) - warped_image, input_image, flow = self.evaluate( - [warped_image, input_image, flow]) - # Check that it moved the pixel correctly. - self.assertAllClose( - warped_image[0, 4, 5, :], - input_image[0, 4, 4, :], - atol=1e-5, - rtol=1e-5) - - # Test that there is no flow at the corners. - for i in (0, image_height - 1): - for j in (0, image_width - 1): - self.assertAllClose( - flow[0, i, j, :], np.zeros([2]), atol=1e-5, rtol=1e-5) + with self.cached_session() as sess: + warped_image, input_image, flow = sess.run( + [warped_image_op, input_image_op, flow_field]) + # Check that it moved the pixel correctly. + self.assertAllClose( + warped_image[0, 4, 5, :], + input_image[0, 4, 4, :], + atol=1e-5, + rtol=1e-5) + + # Test that there is no flow at the corners. 
+ for i in (0, image_height - 1): + for j in (0, image_width - 1): + self.assertAllClose( + flow[0, i, j, :], np.zeros([2]), atol=1e-5, rtol=1e-5) - def load_image(self, image_file): - image = tf.image.decode_png( + def load_image(self, image_file, sess): + image_op = tf.image.decode_png( tf.io.read_file(image_file), dtype=tf.dtypes.uint8, channels=4)[:, :, 0:3] - return self.evaluate(image) + return sess.run(image_op) def testSmileyFace(self): """Check warping accuracy by comparing to hardcoded warped images.""" input_file = get_path_to_datafile( "image/test_data/Yellow_Smiley_Face.png") - input_image = self.load_image(input_file) + with self.cached_session() as sess: + input_image = self.load_image(input_file, sess) control_points = np.asarray([[64, 59], [180 - 64, 59], [39, 111], [180 - 39, 111], [90, 143], [58, 134], [180 - 58, 134]]) # pyformat: disable control_point_displacements = np.asarray([[-10.5, 10.5], [10.5, 10.5], [0, 0], [0, 0], [0, -10], [-20, 10.25], [10, 10.75]]) - control_points = tf.constant( + control_points_op = tf.constant( np.expand_dims(np.float32(control_points[:, [1, 0]]), 0)) - control_point_displacements = tf.constant( + control_point_displacements_op = tf.constant( np.expand_dims( np.float32(control_point_displacements[:, [1, 0]]), 0)) float_image = np.expand_dims(np.float32(input_image) / 255, 0) - input_image = tf.constant(float_image) + input_image_op = tf.constant(float_image) for interpolation_order in (1, 2, 3): for num_boundary_points in (0, 1, 4): - warped_image, _ = sparse_image_warp( - input_image, - control_points, - control_points + control_point_displacements, + warp_op, _ = sparse_image_warp( + input_image_op, + control_points_op, + control_points_op + control_point_displacements_op, interpolation_order=interpolation_order, num_boundary_points=num_boundary_points) - - warped_image = self.evaluate(warped_image) - out_image = np.uint8(warped_image[0, :, :, :] * 255) - target_file = get_path_to_datafile( - "image/test_data/Yellow_Smiley_Face_Warp-interp" + - "-{}-clamp-{}.png".format(interpolation_order, - num_boundary_points)) - - target_image = self.load_image(target_file) - - # Check that the target_image and out_image difference is no - # bigger than 2 (on a scale of 0-255). Due to differences in - # floating point computation on different devices, the float - # output in warped_image may get rounded to a different int - # than that in the saved png file loaded into target_image. - self.assertAllClose(target_image, out_image, atol=2, rtol=1e-3) + with self.cached_session() as sess: + warped_image = sess.run(warp_op) + out_image = np.uint8(warped_image[0, :, :, :] * 255) + target_file = get_path_to_datafile( + "image/test_data/Yellow_Smiley_Face_Warp-interp" + + "-{}-clamp-{}.png".format(interpolation_order, + num_boundary_points)) + + target_image = self.load_image(target_file, sess) + + # Check that the target_image and out_image difference is no + # bigger than 2 (on a scale of 0-255). Due to differences in + # floating point computation on different devices, the float + # output in warped_image may get rounded to a different int + # than that in the saved png file loaded into target_image. 
+ self.assertAllClose( + target_image, out_image, atol=2, rtol=1e-3) def testThatBackpropRuns(self): """Run optimization to ensure that gradients can be computed.""" + self.skipTest("TODO: port to tf2.0 / eager") batch_size = 1 image_height = 9 image_width = 12 image = tf.Variable( - np.random.uniform(size=[batch_size, image_height, image_width, 3]), - dtype=tf.float32) + np.float32( + np.random.uniform( + size=[batch_size, image_height, image_width, 3]))) control_point_locations = [[3., 3.]] control_point_locations = tf.constant( np.float32(np.expand_dims(control_point_locations, 0))) control_point_displacements = [[0.25, -0.5]] control_point_displacements = tf.constant( np.float32(np.expand_dims(control_point_displacements, 0))) - - def loss_fn(): - warped_image, _ = sparse_image_warp( - image, - control_point_locations, - control_point_locations + control_point_displacements, - num_boundary_points=3) - loss = tf.reduce_mean(tf.abs(warped_image - image)) - return loss - - optimizer = tf.keras.optimizers.SGD( - learning_rate=0.001, momentum=0.9, clipnorm=1.0) - opt_op = optimizer.minimize(loss_fn, [image]) - - self.evaluate(tf.compat.v1.global_variables_initializer()) - for _ in range(5): - self.evaluate(opt_op) + warped_image, _ = sparse_image_warp( + image, + control_point_locations, + control_point_locations + control_point_displacements, + num_boundary_points=3) + + loss = tf.reduce_mean(tf.abs(warped_image - image)) + optimizer = tf1.train.MomentumOptimizer(0.001, 0.9) + grad = tf.gradients(loss, [image]) + grad, _ = tf.clip_by_global_norm(grad, 1.0) + opt_func = optimizer.apply_gradients(zip(grad, [image])) + init_op = tf1.variables.global_variables_initializer( + ) # TODO: fix TF1 ref. + + with self.cached_session() as sess: + sess.run(init_op) + for _ in range(5): + sess.run([loss, opt_func]) if __name__ == "__main__": diff --git a/tensorflow_addons/layers/BUILD b/tensorflow_addons/layers/BUILD index 01f475a2fb..59aeb562b5 100644 --- a/tensorflow_addons/layers/BUILD +++ b/tensorflow_addons/layers/BUILD @@ -6,7 +6,6 @@ py_library( name = "layers", srcs = [ "__init__.py", - "gelu.py", "maxout.py", "normalizations.py", "optical_flow.py", @@ -24,19 +23,6 @@ py_library( ], ) -py_test( - name = "gelu_test", - size = "small", - srcs = [ - "gelu_test.py", - ], - main = "gelu_test.py", - srcs_version = "PY2AND3", - deps = [ - ":layers", - ], -) - py_test( name = "layers_wrappers_test", size = "small", diff --git a/tensorflow_addons/layers/README.md b/tensorflow_addons/layers/README.md index ab28337966..4e4e4b48dc 100644 --- a/tensorflow_addons/layers/README.md +++ b/tensorflow_addons/layers/README.md @@ -3,7 +3,6 @@ ## Maintainers | Submodule | Maintainers | Contact Info | |:---------- |:----------- |:------------- | -| gelu | @AakashKumarNain | aakashnain@outlook.com | | maxout | | | | normalizations | @smokrow | moritz.kroeger@tu-dortmund.de | | opticalflow | | | @@ -14,7 +13,6 @@ ## Components | Submodule | Layer | Reference | |:---------- |:----------- |:------------- | -| gelu | GeLU | https://arxiv.org/abs/1606.08415 | | maxout | Maxout | https://arxiv.org/abs/1302.4389 | | normalizations | GroupNormalization | https://arxiv.org/abs/1803.08494 | | normalizations | InstanceNormalization | https://arxiv.org/abs/1607.08022 | diff --git a/tensorflow_addons/layers/__init__.py b/tensorflow_addons/layers/__init__.py index d527e16362..382f2aa80e 100644 --- a/tensorflow_addons/layers/__init__.py +++ b/tensorflow_addons/layers/__init__.py @@ -18,11 +18,10 @@ from __future__ import division 
from __future__ import print_function -from tensorflow_addons.layers.gelu import GeLU from tensorflow_addons.layers.maxout import Maxout from tensorflow_addons.layers.normalizations import GroupNormalization from tensorflow_addons.layers.normalizations import InstanceNormalization from tensorflow_addons.layers.optical_flow import CorrelationCost from tensorflow_addons.layers.poincare import PoincareNormalize from tensorflow_addons.layers.sparsemax import Sparsemax -from tensorflow_addons.layers.wrappers import WeightNormalization \ No newline at end of file +from tensorflow_addons.layers.wrappers import WeightNormalization diff --git a/tensorflow_addons/layers/gelu.py b/tensorflow_addons/layers/gelu.py deleted file mode 100644 index 159e00f729..0000000000 --- a/tensorflow_addons/layers/gelu.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Implements GeLU activation.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import tensorflow as tf -from tensorflow_addons.utils import keras_utils -from tensorflow_addons.activations import gelu - - -@keras_utils.register_keras_custom_object -class GeLU(tf.keras.layers.Layer): - """Gaussian Error Linear Unit. - - A smoother version of ReLU generally used - in the BERT or BERT architecture based models. - Original paper: https://arxiv.org/abs/1606.08415 - - Input shape: - Arbitrary. Use the keyword argument `input_shape` - (tuple of integers, does not include the samples axis) - when using this layer as the first layer in a model. - - Output shape: - Same shape as the input. - """ - - def __init__(self, approximate=True, **kwargs): - super(GeLU, self).__init__(**kwargs) - self.approximate = approximate - self.supports_masking = True - - def call(self, inputs): - return gelu(inputs, approximate=self.approximate) - - def get_config(self): - config = {'approximate': self.approximate} - base_config = super(GeLU, self).get_config() - return dict(list(base_config.items()) + list(config.items())) - - def compute_output_shape(self, input_shape): - return input_shape diff --git a/tensorflow_addons/layers/gelu_test.py b/tensorflow_addons/layers/gelu_test.py deleted file mode 100644 index 99331fb44e..0000000000 --- a/tensorflow_addons/layers/gelu_test.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
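For reference, the `GeLU` layer removed above wraps the addon's `gelu` activation, whose two usual forms come from Hendrycks & Gimpel (https://arxiv.org/abs/1606.08415): the exact `x * Phi(x)` and the tanh approximation used when `approximate=True`. A small NumPy sketch of both is given below for illustration only; it is not the addon's kernel.

```python
import numpy as np
from scipy.special import erf

def gelu_exact(x):
    # x * Phi(x), with Phi the standard normal CDF.
    return 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))

def gelu_tanh(x):
    # Tanh approximation used when approximate=True.
    return 0.5 * x * (1.0 + np.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * x ** 3)))

x = np.array([0.5, 1.2, -0.3])
print(gelu_tanh(x))  # ~[0.3457, 1.0617, -0.1146], the values in the deleted test
```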
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""Tests for GeLU activation.""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import numpy as np -import tensorflow as tf -from absl.testing import parameterized -from tensorflow_addons.layers.gelu import GeLU -from tensorflow_addons.utils import test_utils - - -@parameterized.parameters([np.float16, np.float32, np.float64]) -@test_utils.run_all_in_graph_and_eager_modes -class TestGeLU(tf.test.TestCase): - def test_random(self, dtype): - x = np.array([[0.5, 1.2, -0.3]]).astype(dtype) - val = np.array([[0.345714, 1.0617027, -0.11462909]]).astype(dtype) - test_utils.layer_test( - GeLU, kwargs={'dtype': dtype}, input_data=x, expected_output=val) - - -if __name__ == '__main__': - tf.test.main() diff --git a/tensorflow_addons/layers/optical_flow_test.py b/tensorflow_addons/layers/optical_flow_test.py index 7dedd49f50..060f572c5f 100644 --- a/tensorflow_addons/layers/optical_flow_test.py +++ b/tensorflow_addons/layers/optical_flow_test.py @@ -159,7 +159,7 @@ def _keras(self, data_format): x = [input_a, input_b] y = layer(x) - model = tf.keras.models.Model(x, y) + model = tf.python.keras.models.Model(x, y) actual_output = model.predict([val_a, val_b]) expected_output_type = 'float32' diff --git a/tensorflow_addons/layers/sparsemax_test.py b/tensorflow_addons/layers/sparsemax_test.py index 7c60fab575..4f7367b430 100644 --- a/tensorflow_addons/layers/sparsemax_test.py +++ b/tensorflow_addons/layers/sparsemax_test.py @@ -20,7 +20,6 @@ import numpy as np import tensorflow as tf -from absl.testing import parameterized from tensorflow_addons.layers import Sparsemax from tensorflow_addons.utils import test_utils @@ -51,10 +50,10 @@ def _np_sparsemax(z): return np.maximum(0, z - tau_z) -@parameterized.parameters([np.float32, np.float64]) +@test_utils.run_all_with_types(['float32', 'float64']) @test_utils.run_all_in_graph_and_eager_modes class SparsemaxTest(tf.test.TestCase): - def test_sparsemax_layer_against_numpy(self, dtype): + def test_sparsemax_layer_against_numpy(self, dtype=None): """check sparsemax kernel against numpy.""" random = np.random.RandomState(1) @@ -62,7 +61,6 @@ def test_sparsemax_layer_against_numpy(self, dtype): test_utils.layer_test( Sparsemax, - kwargs={'dtype': dtype}, input_data=z, expected_output=_np_sparsemax(z).astype(dtype)) diff --git a/tensorflow_addons/layers/wrappers.py b/tensorflow_addons/layers/wrappers.py index a5df48664e..084532e542 100644 --- a/tensorflow_addons/layers/wrappers.py +++ b/tensorflow_addons/layers/wrappers.py @@ -58,6 +58,7 @@ class WeightNormalization(tf.keras.layers.Wrapper): def __init__(self, layer, data_init=True, **kwargs): super(WeightNormalization, self).__init__(layer, **kwargs) self.data_init = data_init + self._initialized = False self._track_trackable(layer, name='layer') def build(self, input_shape): @@ -68,67 +69,48 @@ def build(self, input_shape): if not self.layer.built: self.layer.build(input_shape) - if not hasattr(self.layer, 'kernel'): - raise ValueError('`WeightNormalization` must wrap a layer that' - ' contains a `kernel` for weights') - - # The kernel's filter or unit dimension is -1 - self.layer_depth = int(self.layer.kernel.shape[-1]) - self.kernel_norm_axes = list(range(self.layer.kernel.shape.rank - 1)) - - self.g = self.add_variable( - name='g', - 
shape=(self.layer_depth,), - initializer='ones', - dtype=self.layer.kernel.dtype, - trainable=True) - self.v = self.layer.kernel - - self._initialized = self.add_variable( - name='initialized', - shape=None, - initializer='zeros', - dtype=tf.dtypes.bool, - trainable=False) + if not hasattr(self.layer, 'kernel'): + raise ValueError('`WeightNormalization` must wrap a layer that' + ' contains a `kernel` for weights') - if self.data_init: - # Used for data initialization in self._data_dep_init. - layer_config = tf.keras.layers.serialize(self.layer) - layer_config['config']['trainable'] = False - self._naked_clone_layer = tf.keras.layers.deserialize(layer_config) - self._naked_clone_layer.build(input_shape) - self._naked_clone_layer.set_weights(self.layer.get_weights()) - self._naked_clone_layer.activation = None + # The kernel's filter or unit dimension is -1 + self.layer_depth = int(self.layer.kernel.shape[-1]) + self.kernel_norm_axes = list( + range(self.layer.kernel.shape.rank - 1)) + + self.v = self.layer.kernel + self.g = self.add_variable( + name="g", + shape=(self.layer_depth,), + initializer=tf.keras.initializers.get('ones'), + dtype=self.layer.kernel.dtype, + trainable=True) - self.built = True + super(WeightNormalization, self).build() + @tf.function def call(self, inputs): """Call `Layer`""" + if not self._initialized: + self._initialize_weights(inputs) - def _do_nothing(): - return tf.identity(self.g) + self._compute_weights() # Recompute weights for each forward pass + output = self.layer(inputs) + return output - def _update_weights(): - # Ensure we read `self.g` after _update_weights. - with tf.control_dependencies(self._initialize_weights(inputs)): - return tf.identity(self.g) + def compute_output_shape(self, input_shape): + return tf.TensorShape( + self.layer.compute_output_shape(input_shape).as_list()) - g = tf.cond(self._initialized, _do_nothing, _update_weights) + def _compute_weights(self): + """Generate normalized weights. + This method will update the value of self.layer.kernel with the + normalized value, so that the layer is ready for call(). + """ with tf.name_scope('compute_weights'): - # Replace kernel by normalized weight variable. self.layer.kernel = tf.nn.l2_normalize( - self.v, axis=self.kernel_norm_axes) * g - - # Ensure we calculate result after updating kernel. - update_kernel = tf.identity(self.layer.kernel) - with tf.control_dependencies([update_kernel]): - outputs = self.layer(inputs) - return outputs - - def compute_output_shape(self, input_shape): - return tf.TensorShape( - self.layer.compute_output_shape(input_shape).as_list()) + self.v, axis=self.kernel_norm_axes) * self.g def _initialize_weights(self, inputs): """Initialize weight g. @@ -136,43 +118,36 @@ def _initialize_weights(self, inputs): The initial value of g could either from the initial value in v, or by the input value if self.data_init is True. 
""" - with tf.control_dependencies([ - tf.debugging.assert_equal( # pylint: disable=bad-continuation - self._initialized, - False, - message='The layer has been initialized.') - ]): - if self.data_init: - assign_tensors = self._data_dep_init(inputs) - else: - assign_tensors = self._init_norm() - assign_tensors.append(self._initialized.assign(True)) - return assign_tensors + if self.data_init: + self._data_dep_init(inputs) + else: + self._init_norm() + self._initialized = True def _init_norm(self): """Set the weight g with the norm of the weight vector.""" with tf.name_scope('init_norm'): - v_flat = tf.reshape(self.v, [-1, self.layer_depth]) - v_norm = tf.linalg.norm(v_flat, axis=0) - g_tensor = self.g.assign(tf.reshape(v_norm, (self.layer_depth,))) - return [g_tensor] + flat = tf.reshape(self.v, [-1, self.layer_depth]) + self.g.assign( + tf.reshape(tf.linalg.norm(flat, axis=0), (self.layer_depth,))) def _data_dep_init(self, inputs): """Data dependent initialization.""" + with tf.name_scope('data_dep_init'): # Generate data dependent init values - x_init = self._naked_clone_layer(inputs) + existing_activation = self.layer.activation + self.layer.activation = None + x_init = self.layer(inputs) data_norm_axes = list(range(x_init.shape.rank - 1)) m_init, v_init = tf.nn.moments(x_init, data_norm_axes) scale_init = 1. / tf.math.sqrt(v_init + 1e-10) - # Assign data dependent init values - g_tensor = self.g.assign(self.g * scale_init) - if hasattr(self.layer, 'bias'): - bias_tensor = self.layer.bias.assign(-m_init * scale_init) - return [g_tensor, bias_tensor] - else: - return [g_tensor] + # Assign data dependent init values + self.g = self.g * scale_init + if hasattr(self.layer, 'bias'): + self.layer.bias = -m_init * scale_init + self.layer.activation = existing_activation def get_config(self): config = {'data_init': self.data_init} diff --git a/tensorflow_addons/layers/wrappers_test.py b/tensorflow_addons/layers/wrappers_test.py index b4bdb9c494..9d83bbec50 100644 --- a/tensorflow_addons/layers/wrappers_test.py +++ b/tensorflow_addons/layers/wrappers_test.py @@ -26,52 +26,82 @@ @test_utils.run_all_in_graph_and_eager_modes class WeightNormalizationTest(tf.test.TestCase): - def test_weightnorm(self): - test_utils.layer_test( - wrappers.WeightNormalization, - kwargs={ - 'layer': tf.keras.layers.Conv2D(5, (2, 2)), - }, - input_shape=(2, 4, 4, 3)) - - def _check_data_init(self, data_init, input_data, expected_output): - layer = tf.keras.layers.Dense( - input_data.shape[-1], - activation=None, - kernel_initializer='identity', - bias_initializer='zeros') - test_utils.layer_test( - wrappers.WeightNormalization, - kwargs={ - 'layer': layer, - 'data_init': data_init, - }, - input_data=input_data, - expected_output=expected_output) - - def test_weightnorm_with_data_init_is_false(self): - input_data = np.array([[[-4, -4], [4, 4]]], dtype=np.float32) - self._check_data_init( - data_init=False, input_data=input_data, expected_output=input_data) - - def test_weightnorm_with_data_init_is_true(self): - input_data = np.array([[[-4, -4], [4, 4]]], dtype=np.float32) - self._check_data_init( - data_init=True, - input_data=input_data, - expected_output=input_data / 4) - - def test_weightnorm_non_layer(self): + def test_weightnorm_dense_train(self): + model = tf.keras.models.Sequential() + model.add( + wrappers.WeightNormalization( + tf.keras.layers.Dense(2), input_shape=(3, 4))) + model.compile( + optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001), + loss='mse') + model.fit( + np.random.random((10, 3, 4)), + 
np.random.random((10, 3, 2)), + epochs=3, + batch_size=10) + self.assertTrue(hasattr(model.layers[0], 'g')) + + def test_weightnorm_dense_train_notinit(self): + model = tf.keras.models.Sequential() + model.add( + wrappers.WeightNormalization( + tf.keras.layers.Dense(2), input_shape=(3, 4), data_init=False)) + + model.compile( + optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001), + loss='mse') + model.fit( + np.random.random((10, 3, 4)), + np.random.random((10, 3, 2)), + epochs=3, + batch_size=10) + self.assertTrue(hasattr(model.layers[0], 'g')) + + def test_weightnorm_conv2d(self): + model = tf.keras.models.Sequential() + model.add( + wrappers.WeightNormalization( + tf.keras.layers.Conv2D(5, (2, 2), padding='same'), + input_shape=(4, 4, 3))) + + model.add(tf.keras.layers.Activation('relu')) + model.compile( + optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001), + loss='mse') + model.fit( + np.random.random((2, 4, 4, 3)), + np.random.random((2, 4, 4, 5)), + epochs=3, + batch_size=10) + + self.assertTrue(hasattr(model.layers[0], 'g')) + + def test_weightnorm_applylayer(self): + images = tf.random.uniform((2, 4, 4, 3)) + wn_wrapper = wrappers.WeightNormalization( + tf.keras.layers.Conv2D(32, [2, 2]), input_shape=(4, 4, 3)) + wn_wrapper.apply(images) + self.assertTrue(hasattr(wn_wrapper, 'g')) + + def test_weightnorm_nonlayer(self): images = tf.random.uniform((2, 4, 43)) with self.assertRaises(AssertionError): wrappers.WeightNormalization(images) - def test_weightnorm_non_kernel_layer(self): - images = tf.random.uniform((2, 2, 2)) - with self.assertRaisesRegexp(ValueError, 'contains a `kernel`'): - non_kernel_layer = tf.keras.layers.MaxPooling2D(2, 2) - wn_wrapper = wrappers.WeightNormalization(non_kernel_layer) - wn_wrapper(images) + def test_weightnorm_nokernel(self): + with self.assertRaises(ValueError): + wrappers.WeightNormalization(tf.keras.layers.MaxPooling2D( + 2, 2)).build((2, 2)) + + def test_weightnorm_keras(self): + input_data = np.random.random((10, 3, 4)).astype(np.float32) + test_utils.layer_test( + wrappers.WeightNormalization, + kwargs={ + 'layer': tf.keras.layers.Dense(2), + 'input_shape': (3, 4) + }, + input_data=input_data) if __name__ == "__main__": diff --git a/tensorflow_addons/losses/README.md b/tensorflow_addons/losses/README.md index c1ed11c35d..e0951d41c1 100644 --- a/tensorflow_addons/losses/README.md +++ b/tensorflow_addons/losses/README.md @@ -17,7 +17,6 @@ | focal_loss | SigmoidFocalCrossEntropy | https://arxiv.org/abs/1708.02002 | | lifted | LiftedStructLoss | https://arxiv.org/abs/1511.06452 | | npairs | NpairsLoss | http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf | -| npairs | NpairsMultilabelLoss | http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf | | sparsemax_loss | SparsemaxLoss | https://arxiv.org/abs/1602.02068 | | triplet | TripletSemiHardLoss | https://arxiv.org/abs/1503.03832 | diff --git a/tensorflow_addons/losses/__init__.py b/tensorflow_addons/losses/__init__.py index ff8e5094fa..ce94d7b91e 100644 --- a/tensorflow_addons/losses/__init__.py +++ b/tensorflow_addons/losses/__init__.py @@ -21,6 +21,6 @@ from tensorflow_addons.losses.contrastive import contrastive_loss, ContrastiveLoss from tensorflow_addons.losses.focal_loss import sigmoid_focal_crossentropy, SigmoidFocalCrossEntropy from tensorflow_addons.losses.lifted import lifted_struct_loss, LiftedStructLoss -from tensorflow_addons.losses.npairs import 
npairs_loss, NpairsLoss, npairs_multilabel_loss, NpairsMultilabelLoss +from tensorflow_addons.losses.npairs import npairs_loss, NpairsLoss from tensorflow_addons.losses.sparsemax_loss import sparsemax_loss, SparsemaxLoss from tensorflow_addons.losses.triplet import triplet_semihard_loss, TripletSemiHardLoss diff --git a/tensorflow_addons/losses/npairs.py b/tensorflow_addons/losses/npairs.py index 319fba0fc9..adba81566e 100644 --- a/tensorflow_addons/losses/npairs.py +++ b/tensorflow_addons/losses/npairs.py @@ -64,71 +64,6 @@ def npairs_loss(y_true, y_pred): return tf.math.reduce_mean(loss) -@keras_utils.register_keras_custom_object -@tf.function -def npairs_multilabel_loss(y_true, y_pred): - """Computes the npairs loss between multilabel data `y_true` and `y_pred`. - - Npairs loss expects paired data where a pair is composed of samples from - the same labels and each pairs in the minibatch have different labels. - The loss takes each row of the pair-wise similarity matrix, `y_pred`, - as logits and the remapped multi-class labels, `y_true`, as labels. - - To deal with multilabel inputs, the count of label intersection - is computed as follows: - - ``` - L_{i,j} = | set_of_labels_for(i) \cap set_of_labels_for(j) | - ``` - - Each row of the count based label matrix is further normalized so that - each row sums to one. - - `y_true` should be a binary indicator for classes. - That is, if `y_true[i, j] = 1`, then `i`th sample is in `j`th class; - if `y_true[i, j] = 0`, then `i`th sample is not in `j`th class. - - The similarity matrix `y_pred` between two embedding matrices `a` and `b` - with shape `[batch_size, hidden_size]` can be computed as follows: - - ```python - # y_pred = a * b^T - y_pred = tf.matmul(a, b, transpose_a=False, transpose_b=True) - ``` - - See: http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf - - Args: - y_true: Either 2-D integer `Tensor` with shape - `[batch_size, num_classes]`, or `SparseTensor` with dense shape - `[batch_size, num_classes]`. If `y_true` is a `SparseTensor`, then - it will be converted to `Tensor` via `tf.sparse.to_dense` first. - - y_pred: 2-D float `Tensor` with shape `[batch_size, batch_size]` of - similarity matrix between embedding matrices. - - Returns: - npairs_multilabel_loss: float scalar. - """ - y_pred = tf.convert_to_tensor(y_pred) - y_true = tf.cast(y_true, y_pred.dtype) - - # Convert to dense tensor if `y_true` is a `SparseTensor` - if isinstance(y_true, tf.SparseTensor): - y_true = tf.sparse.to_dense(y_true) - - # Enable efficient multiplication because y_true contains lots of zeros - # https://www.tensorflow.org/api_docs/python/tf/linalg/matmul - y_true = tf.linalg.matmul( - y_true, y_true, transpose_b=True, a_is_sparse=True, b_is_sparse=True) - y_true /= tf.math.reduce_sum(y_true, 1, keepdims=True) - - loss = tf.nn.softmax_cross_entropy_with_logits( - logits=y_pred, labels=y_true) - - return tf.math.reduce_mean(loss) - - @keras_utils.register_keras_custom_object class NpairsLoss(tf.keras.losses.Loss): """Computes the npairs loss between `y_true` and `y_pred`. @@ -158,48 +93,3 @@ def __init__(self, name="npairs_loss"): def call(self, y_true, y_pred): return npairs_loss(y_true, y_pred) - - -@keras_utils.register_keras_custom_object -class NpairsMultilabelLoss(tf.keras.losses.Loss): - """Computes the npairs loss between multilabel data `y_true` and `y_pred`. 
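# A short NumPy sketch of the label-matrix construction described in the
# removed npairs_multilabel_loss docstring above: L[i, j] counts the labels
# shared by samples i and j, and each row is normalized to sum to one before
# the softmax cross-entropy against the similarity logits. The y_true below is
# the same multilabel indicator used in the removed test; illustrative only.
import numpy as np

y_true = np.array([[1, 1, 0, 0],
                   [0, 1, 1, 0],
                   [0, 0, 1, 1],
                   [0, 0, 0, 1]], dtype=np.float64)
label_matrix = y_true @ y_true.T                         # |labels(i) & labels(j)|
label_matrix /= label_matrix.sum(axis=1, keepdims=True)  # row-normalize
print(label_matrix)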
- - Npairs loss expects paired data where a pair is composed of samples from - the same labels and each pairs in the minibatch have different labels. - The loss takes each row of the pair-wise similarity matrix, `y_pred`, - as logits and the remapped multi-class labels, `y_true`, as labels. - - To deal with multilabel inputs, the count of label intersection - is computed as follows: - - ``` - L_{i,j} = | set_of_labels_for(i) \cap set_of_labels_for(j) | - ``` - - Each row of the count based label matrix is further normalized so that - each row sums to one. - - `y_true` should be a binary indicator for classes. - That is, if `y_true[i, j] = 1`, then `i`th sample is in `j`th class; - if `y_true[i, j] = 0`, then `i`th sample is not in `j`th class. - - The similarity matrix `y_pred` between two embedding matrices `a` and `b` - with shape `[batch_size, hidden_size]` can be computed as follows: - - ```python - # y_pred = a * b^T - y_pred = tf.matmul(a, b, transpose_a=False, transpose_b=True) - ``` - - See: http://www.nec-labs.com/uploads/images/Department-Images/MediaAnalytics/papers/nips16_npairmetriclearning.pdf - - Args: - name: (Optional) name for the loss. - """ - - def __init__(self, name="npairs_multilabel_loss"): - super(NpairsMultilabelLoss, self).__init__( - reduction=tf.keras.losses.Reduction.NONE, name=name) - - def call(self, y_true, y_pred): - return npairs_multilabel_loss(y_true, y_pred) diff --git a/tensorflow_addons/losses/npairs_test.py b/tensorflow_addons/losses/npairs_test.py index 043c7d983d..0f0ecc12b3 100644 --- a/tensorflow_addons/losses/npairs_test.py +++ b/tensorflow_addons/losses/npairs_test.py @@ -54,84 +54,5 @@ def test_unweighted(self): self.assertAllClose(loss, 0.253856) -@test_utils.run_all_in_graph_and_eager_modes -class NpairsMultilabelLossTest(tf.test.TestCase): - def config(self): - nml_obj = npairs.NpairsMultilabelLoss(name="nml") - self.assertEqual(nml_obj.name, "nml") - self.assertEqual(nml_obj.reduction, tf.keras.losses.Reduction.NONE) - - def test_single_label(self): - """Test single label, which is the same with `NpairsLoss`.""" - nml_obj = npairs.NpairsMultilabelLoss() - # batch size = 4, hidden size = 2 - y_true = tf.constant( - [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], - dtype=tf.int64) - # features of anchors - f = tf.constant([[1., 1.], [1., -1.], [-1., 1.], [-1., -1.]], - dtype=tf.float32) - # features of positive samples - fp = tf.constant([[1., 1.], [1., -1.], [-1., 1.], [-1., -1.]], - dtype=tf.float32) - # similarity matrix - y_pred = tf.matmul(f, fp, transpose_a=False, transpose_b=True) - loss = nml_obj(y_true, y_pred) - - # Loss = 1/4 * \sum_i log(1 + \sum_{j != i} exp(f_i*fp_j^T-f_i*f_i^T)) - # Compute loss for i = 0, 1, 2, 3 without multiplier 1/4 - # i = 0 => log(1 + sum([exp(-2), exp(-2), exp(-4)])) = 0.253846 - # i = 1 => log(1 + sum([exp(-2), exp(-4), exp(-2)])) = 0.253846 - # i = 2 => log(1 + sum([exp(-2), exp(-4), exp(-2)])) = 0.253846 - # i = 3 => log(1 + sum([exp(-4), exp(-2), exp(-2)])) = 0.253846 - # Loss = (0.253856 + 0.253856 + 0.253856 + 0.253856) / 4 = 0.253856 - - self.assertAllClose(loss, 0.253856) - - # Test sparse tensor - y_true = tf.sparse.from_dense(y_true) - loss = nml_obj(y_true, y_pred) - self.assertAllClose(loss, 0.253856) - - def test_multilabel(self): - nml_obj = npairs.NpairsMultilabelLoss() - # batch size = 4, hidden size = 2 - y_true = tf.constant( - [[1, 1, 0, 0], [0, 1, 1, 0], [0, 0, 1, 1], [0, 0, 0, 1]], - dtype=tf.int64) - # features of anchors - f = tf.constant([[1., 1.], [1., -1.], [-1., 
1.], [-1., -1.]], - dtype=tf.float32) - # features of positive samples - fp = tf.constant([[1., 1.], [1., -1.], [-1., 1.], [-1., -1.]], - dtype=tf.float32) - # similarity matrix - y_pred = tf.matmul(f, fp, transpose_a=False, transpose_b=True) - loss = nml_obj(y_true, y_pred) - - # Loss = \sum_i log(1 + \sum_{j != i} exp(f_i*fp_j^T-f_i*f_i^T)) - # Because of multilabel, the label matrix is normalized so that each - # row sums to one. That's why the multiplier before log exists. - # Compute loss for i = 0, 1, 2, 3 without multiplier 1/4 - # i = 0 => 2/3 * log(1 + sum([exp(-2), exp(-2), exp(-4)])) + - # 1/3 * log(1 + sum([exp(2) , exp(0) , exp(-2)])) = 0.920522 - # i = 1 => 1/4 * log(1 + sum([exp(2) , exp(-2), exp(0) ])) + - # 1/2 * log(1 + sum([exp(-2), exp(-4), exp(-2)])) + - # 1/4 * log(1 + sum([exp(2) , exp(4) , exp(2) ])) = 1.753856 - # i = 2 => 1/4 * log(1 + sum([exp(2) , exp(4) , exp(2) ])) + - # 1/2 * log(1 + sum([exp(-2), exp(-4), exp(-2)])) + - # 1/4 * log(1 + sum([exp(0) , exp(-2), exp(2) ])) = 1.753856 - # i = 4 => 1/2 * log(1 + sum([exp(-2), exp(0) , exp(2) ])) + - # 1/2 * log(1 + sum([exp(-4), exp(-2), exp(-2)])) = 1.253856 - # Loss = (0.920522 + 1.753856 + 1.753856 + 1.253856) / 4 = 1.420522 - - self.assertAllClose(loss, 1.420522) - - # Test sparse tensor - y_true = tf.sparse.from_dense(y_true) - loss = nml_obj(y_true, y_pred) - self.assertAllClose(loss, 1.420522) - - if __name__ == "__main__": tf.test.main() diff --git a/tensorflow_addons/losses/sparsemax_loss.py b/tensorflow_addons/losses/sparsemax_loss.py index 6d6f0e1364..82e175ad80 100644 --- a/tensorflow_addons/losses/sparsemax_loss.py +++ b/tensorflow_addons/losses/sparsemax_loss.py @@ -23,8 +23,8 @@ from tensorflow_addons.utils import keras_utils -@keras_utils.register_keras_custom_object @tf.function +@keras_utils.register_keras_custom_object def sparsemax_loss(logits, sparsemax, labels, name=None): """Sparsemax loss function [1]. diff --git a/tensorflow_addons/losses/sparsemax_loss_test.py b/tensorflow_addons/losses/sparsemax_loss_test.py index 6788480951..b29f70cb8b 100644 --- a/tensorflow_addons/losses/sparsemax_loss_test.py +++ b/tensorflow_addons/losses/sparsemax_loss_test.py @@ -226,12 +226,6 @@ def test_gradient_against_estimate(self, dtype=None): lambda logits: sparsemax_loss(logits, sparsemax(logits), q), [z]) self.assertAllCloseAccordingToType(jacob_sym, jacob_num) - def test_serialization(self, dtype=None): - ref_fn = sparsemax_loss - config = tf.keras.losses.serialize(ref_fn) - fn = tf.keras.losses.deserialize(config) - self.assertEqual(ref_fn, fn) - if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/metrics/cohens_kappa.py b/tensorflow_addons/metrics/cohens_kappa.py index 40a7680922..477cee1330 100644 --- a/tensorflow_addons/metrics/cohens_kappa.py +++ b/tensorflow_addons/metrics/cohens_kappa.py @@ -41,21 +41,15 @@ class CohenKappa(Metric): ```python actuals = np.array([4, 4, 3, 4, 2, 4, 1, 1], dtype=np.int32) preds = np.array([4, 4, 3, 4, 4, 2, 1, 1], dtype=np.int32) - weights = np.array([1, 1, 2, 5, 10, 2, 3, 3], dtype=np.int32) - - m = tfa.metrics.CohenKappa(num_classes=5) - m.update_state(actuals, preds) - print('Final result: ', m.result().numpy()) # Result: 0.61904764 - - # To use this with weights, sample_weight argument can be used. 
- m = tfa.metrics.CohenKappa(num_classes=5) - m.update_state(actuals, preds, sample_weight=weights) - print('Final result: ', m.result().numpy()) # Result: 0.37209308 + + m = tf.keras.metrics.CohenKappa(num_classes=5) + m.update_state(actuals, preds, "quadratic") + print('Final result: ', m.result().numpy()) # Result: 0.68932 ``` Usage with tf.keras API: ```python model = keras.models.Model(inputs, outputs) - model.add_metric(tfa.metrics.CohenKappa(num_classes=5)(outputs)) + model.add_metric(tf.keras.metrics.CohenKappa(num_classes=5)(outputs)) model.compile('sgd', loss='mse') ``` @@ -186,4 +180,4 @@ def reset_states(self): for v in self.variables: K.set_value( - v, np.zeros((self.num_classes, self.num_classes), np.int32)) + v, np.zeros((self.num_classes, self.num_classes), np.int32)) \ No newline at end of file diff --git a/tensorflow_addons/metrics/f1_test.py b/tensorflow_addons/metrics/f1_test.py index e11165bb2c..9ea1ccd2e1 100755 --- a/tensorflow_addons/metrics/f1_test.py +++ b/tensorflow_addons/metrics/f1_test.py @@ -21,8 +21,6 @@ import tensorflow as tf from tensorflow_addons.metrics import F1Score from tensorflow_addons.utils import test_utils -from tensorflow.keras import layers -import numpy as np @test_utils.run_all_in_graph_and_eager_modes @@ -108,23 +106,6 @@ def test_f1_random_score_none(self): for avg, res in test_params: self._test_f1(avg, actuals, preds, res) - # Keras model check - def test_keras_model(self): - model = tf.keras.Sequential() - model.add(layers.Dense(64, activation='relu')) - model.add(layers.Dense(64, activation='relu')) - model.add(layers.Dense(1, activation='softmax')) - fb = F1Score(1, 'macro') - model.compile( - optimizer='rmsprop', - loss='categorical_crossentropy', - metrics=['acc', fb]) - # data preparation - data = np.random.random((10, 3)) - labels = np.random.random((10, 1)) - labels = np.where(labels > 0.5, 1, 0) - model.fit(data, labels, epochs=1, batch_size=32, verbose=0) - if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/metrics/f_scores.py b/tensorflow_addons/metrics/f_scores.py index 5c7b283894..c6495528a9 100755 --- a/tensorflow_addons/metrics/f_scores.py +++ b/tensorflow_addons/metrics/f_scores.py @@ -175,9 +175,7 @@ def __init__(self, initializer='zeros', dtype=self.dtype) - # TODO: Add sample_weight support, currently it is - # ignored during calculations. - def update_state(self, y_true, y_pred, sample_weight=None): + def update_state(self, y_true, y_pred): y_true = tf.cast(y_true, tf.int32) y_pred = tf.cast(y_pred, tf.int32) @@ -340,8 +338,6 @@ def __init__(self, num_classes, average, name='f1_score', super(F1Score, self).__init__( num_classes, average, 1.0, name=name, dtype=dtype) - # TODO: Add sample_weight support, currently it is - # ignored during calculations. 
def get_config(self): base_config = super(F1Score, self).get_config() del base_config["beta"] diff --git a/tensorflow_addons/metrics/fbeta_test.py b/tensorflow_addons/metrics/fbeta_test.py index 69a5b730d1..d035c3de18 100644 --- a/tensorflow_addons/metrics/fbeta_test.py +++ b/tensorflow_addons/metrics/fbeta_test.py @@ -21,8 +21,6 @@ import tensorflow as tf from tensorflow_addons.metrics import FBetaScore from tensorflow_addons.utils import test_utils -from tensorflow.keras import layers -import numpy as np @test_utils.run_all_in_graph_and_eager_modes @@ -114,23 +112,6 @@ def test_fbeta_random_score_none(self): for beta, res in test_params: self._test_fbeta(None, beta, actuals, preds, res) - # Keras model check - def test_keras_model(self): - model = tf.keras.Sequential() - model.add(layers.Dense(64, activation='relu')) - model.add(layers.Dense(64, activation='relu')) - model.add(layers.Dense(1, activation='softmax')) - fb = FBetaScore(1, 'macro') - model.compile( - optimizer='rmsprop', - loss='categorical_crossentropy', - metrics=['acc', fb]) - # data preparation - data = np.random.random((10, 3)) - labels = np.random.random((10, 1)) - labels = np.where(labels > 0.5, 1, 0) - model.fit(data, labels, epochs=1, batch_size=32, verbose=0) - if __name__ == '__main__': tf.test.main() diff --git a/tensorflow_addons/optimizers/BUILD b/tensorflow_addons/optimizers/BUILD old mode 100755 new mode 100644 diff --git a/tensorflow_addons/optimizers/README.md b/tensorflow_addons/optimizers/README.md old mode 100755 new mode 100644 diff --git a/tensorflow_addons/optimizers/__init__.py b/tensorflow_addons/optimizers/__init__.py old mode 100755 new mode 100644 diff --git a/tensorflow_addons/optimizers/lazy_adam_test.py b/tensorflow_addons/optimizers/lazy_adam_test.py index b9ed5f8860..cea6484df5 100644 --- a/tensorflow_addons/optimizers/lazy_adam_test.py +++ b/tensorflow_addons/optimizers/lazy_adam_test.py @@ -322,7 +322,7 @@ def testSlotsUniqueEager(self): opt = lazy_adam.LazyAdam(1.) opt.minimize(lambda: v1 + v2, var_list=[v1, v2]) # There should be iteration, and two unique slot variables for v1 and v2. - self.assertEqual(5, len(opt.variables())) + self.assertEqual(5, len(set(opt.variables()))) self.assertEqual( self.evaluate(opt.variables()[0]), self.evaluate(opt.iterations)) diff --git a/tensorflow_addons/rnn/cell_test.py b/tensorflow_addons/rnn/cell_test.py index 82d09260e8..bfc7146aa0 100644 --- a/tensorflow_addons/rnn/cell_test.py +++ b/tensorflow_addons/rnn/cell_test.py @@ -156,7 +156,7 @@ def test_config(self): cell = rnn_cell.NASCell(10, projection=5, use_bias=True) expected_config = { - "dtype": "float32", + "dtype": None, "name": "nas_cell", "trainable": True, "units": 10, @@ -241,7 +241,7 @@ def test_config(self): cell = rnn_cell.LayerNormLSTMCell(10) expected_config = { - "dtype": "float32", + "dtype": None, "name": "layer_norm_lstm_cell", "trainable": True, "units": 10, diff --git a/tensorflow_addons/seq2seq/attention_wrapper.py b/tensorflow_addons/seq2seq/attention_wrapper.py index 275245fded..aa8b38a6b6 100644 --- a/tensorflow_addons/seq2seq/attention_wrapper.py +++ b/tensorflow_addons/seq2seq/attention_wrapper.py @@ -1538,8 +1538,7 @@ def __init__(self, initial_cell_state=None, name=None, attention_layer=None, - attention_fn=None, - **kwargs): + attention_fn=None): """Construct the `AttentionWrapper`. **NOTE** If you are using the `BeamSearchDecoder` with a cell wrapped @@ -1620,7 +1619,6 @@ def __init__(self, attention_layer) and outputs (attention, alignments, next_attention_state). 
If provided, the attention_layer_size should be the size of the outputs of attention_fn. - **kwargs: Other keyword arguments for layer creation. Raises: TypeError: `attention_layer_size` is not None and @@ -1631,7 +1629,7 @@ def __init__(self, of `attention_layer_size`; if `attention_layer_size` and `attention_layer` are set simultaneously. """ - super(AttentionWrapper, self).__init__(name=name, **kwargs) + super(AttentionWrapper, self).__init__(name=name) rnn_cell_impl.assert_like_rnncell("cell", cell) if isinstance(attention_mechanism, (list, tuple)): self._is_multi = True diff --git a/tensorflow_addons/seq2seq/attention_wrapper_test.py b/tensorflow_addons/seq2seq/attention_wrapper_test.py index b9adefbe82..5b4e724187 100644 --- a/tensorflow_addons/seq2seq/attention_wrapper_test.py +++ b/tensorflow_addons/seq2seq/attention_wrapper_test.py @@ -28,6 +28,10 @@ from tensorflow_addons.seq2seq import basic_decoder from tensorflow_addons.seq2seq import sampler as sampler_py +# TODO: Find public API alternatives to these +from tensorflow.python import keras +from tensorflow.python.keras import initializers + @test_utils.run_all_in_graph_and_eager_modes class AttentionMechanismTest(tf.test.TestCase, parameterized.TestCase): @@ -127,22 +131,22 @@ def test_passing_memory_from_call(self, attention_cls): def test_save_load_layer(self, attention_cls): vocab = 20 embedding_dim = 6 - inputs = tf.keras.Input(shape=[self.timestep]) - encoder_input = tf.keras.layers.Embedding( + inputs = keras.layers.Input(shape=[self.timestep]) + encoder_input = keras.layers.Embedding( vocab, embedding_dim, mask_zero=True)(inputs) - encoder_output = tf.keras.layers.LSTM( + encoder_output = keras.layers.LSTM( self.memory_size, return_sequences=True)(encoder_input) attention = attention_cls(self.units, encoder_output) - query = tf.keras.Input(shape=[self.units]) - state = tf.keras.Input(shape=[self.timestep]) + query = keras.layers.Input(shape=[self.units]) + state = keras.layers.Input(shape=[self.timestep]) score = attention([query, state]) x = np.random.randint(vocab, size=(self.batch, self.timestep)) x_test = np.random.randint(vocab, size=(self.batch, self.timestep)) y = np.random.randn(self.batch, self.timestep) - model = tf.keras.Model([inputs, query, state], score) + model = keras.models.Model([inputs, query, state], score) # Fall back to v1 style Keras training loop until issue with # using outputs of a layer in another layer's constructor. model.compile("rmsprop", "mse", experimental_run_tf_function=False) @@ -151,7 +155,7 @@ def test_save_load_layer(self, attention_cls): config = model.get_config() weights = model.get_weights() - loaded_model = tf.keras.Model.from_config( + loaded_model = keras.models.Model.from_config( config, custom_objects={attention_cls.__name__: attention_cls}) loaded_model.set_weights(weights) @@ -333,12 +337,11 @@ def _testWithMaybeMultiAttention(self, # Create a memory layer with deterministic initializer to avoid # randomness in the test between graph and eager. 
if create_query_layer: - create_attention_kwargs["query_layer"] = tf.keras.layers.Dense( + create_attention_kwargs["query_layer"] = keras.layers.Dense( depth, kernel_initializer="ones", use_bias=False) if create_memory_layer: - create_attention_kwargs["memory_layer"] = ( - tf.keras.layers.Dense( - depth, kernel_initializer="ones", use_bias=False)) + create_attention_kwargs["memory_layer"] = keras.layers.Dense( + depth, kernel_initializer="ones", use_bias=False) attention_mechanisms.append( creator( @@ -355,7 +358,7 @@ def _testWithMaybeMultiAttention(self, attention_layer_size = attention_layer_size[0] if attention_layer is not None: attention_layer = attention_layer[0] - cell = tf.keras.layers.LSTMCell( + cell = keras.layers.LSTMCell( cell_depth, recurrent_activation="sigmoid", kernel_initializer="ones", @@ -368,9 +371,8 @@ def _testWithMaybeMultiAttention(self, attention_layer=attention_layer) if cell._attention_layers is not None: for layer in cell._attention_layers: - layer.kernel_initializer = ( - tf.compat.v1.keras.initializers.glorot_uniform( - seed=1337)) + layer.kernel_initializer = initializers.glorot_uniform( + seed=1337) sampler = sampler_py.TrainingSampler() my_decoder = basic_decoder.BasicDecoder(cell=cell, sampler=sampler) @@ -474,13 +476,12 @@ def testBahdanauNormalizedDType(self, dtype): memory_sequence_length=self.encoder_sequence_length, normalize=True, dtype=dtype) - cell = tf.keras.layers.LSTMCell( - self.units, recurrent_activation="sigmoid", dtype=dtype) - cell = wrapper.AttentionWrapper(cell, attention_mechanism, dtype=dtype) + cell = keras.layers.LSTMCell( + self.units, recurrent_activation="sigmoid") + cell = wrapper.AttentionWrapper(cell, attention_mechanism) sampler = sampler_py.TrainingSampler() - my_decoder = basic_decoder.BasicDecoder( - cell=cell, sampler=sampler, dtype=dtype) + my_decoder = basic_decoder.BasicDecoder(cell=cell, sampler=sampler) final_outputs, final_state, _ = my_decoder( decoder_inputs, @@ -503,13 +504,12 @@ def testLuongScaledDType(self, dtype): scale=True, dtype=dtype, ) - cell = tf.keras.layers.LSTMCell( - self.units, recurrent_activation="sigmoid", dtype=dtype) - cell = wrapper.AttentionWrapper(cell, attention_mechanism, dtype=dtype) + cell = keras.layers.LSTMCell( + self.units, recurrent_activation="sigmoid") + cell = wrapper.AttentionWrapper(cell, attention_mechanism) sampler = sampler_py.TrainingSampler() - my_decoder = basic_decoder.BasicDecoder( - cell=cell, sampler=sampler, dtype=dtype) + my_decoder = basic_decoder.BasicDecoder(cell=cell, sampler=sampler) final_outputs, final_state, _ = my_decoder( decoder_inputs, diff --git a/tensorflow_addons/text/crf.py b/tensorflow_addons/text/crf.py index b20b0476ae..d8d97bf216 100644 --- a/tensorflow_addons/text/crf.py +++ b/tensorflow_addons/text/crf.py @@ -188,9 +188,8 @@ def crf_log_likelihood(inputs, # Get the transition matrix if not provided. 
if transition_params is None: - initializer = tf.keras.initializers.GlorotUniform() - transition_params = tf.Variable( - initializer([num_tags, num_tags]), "transitions") + transition_params = tf.get_variable("transitions", + [num_tags, num_tags]) sequence_scores = crf_sequence_score(inputs, tag_indices, sequence_lengths, transition_params) diff --git a/tensorflow_addons/text/crf_test.py b/tensorflow_addons/text/crf_test.py index 1c76d0b0ec..84c09b539b 100644 --- a/tensorflow_addons/text/crf_test.py +++ b/tensorflow_addons/text/crf_test.py @@ -229,12 +229,6 @@ def testCrfLogLikelihood(self): tf_total_log_likelihood = self.evaluate(total_log_likelihood) self.assertAllClose(tf_total_log_likelihood, 0.0) - # check if `transition_params = None` raises an error - text.crf_log_likelihood( - inputs=tf.expand_dims(inputs, 0), - tag_indices=tf.expand_dims(tag_indices, 0), - sequence_lengths=tf.expand_dims(sequence_lengths, 0)) - def testViterbiDecode(self): inputs = np.array([[4, 5, -3], [3, -1, 3], [-1, 2, 1], [0, 0, 0]], dtype=np.float32) diff --git a/tensorflow_addons/version.py b/tensorflow_addons/version.py index 3ed34d31fc..32892f5d79 100644 --- a/tensorflow_addons/version.py +++ b/tensorflow_addons/version.py @@ -19,7 +19,7 @@ # We follow Semantic Versioning (https://semver.org/) _MAJOR_VERSION = '0' -_MINOR_VERSION = '6' +_MINOR_VERSION = '5' _PATCH_VERSION = '0' # When building releases, we can update this value on the release branch to diff --git a/tools/ci_build/builds/release_linux.sh b/tools/ci_build/builds/release_linux.sh index 7f2ef964be..e4414926d7 100755 --- a/tools/ci_build/builds/release_linux.sh +++ b/tools/ci_build/builds/release_linux.sh @@ -15,16 +15,13 @@ # ============================================================================== set -e -x -PYTHON_VERSIONS="python2.7 python3.5 python3.6 python3.7" -ln -sf /usr/bin/python3.5 /usr/bin/python3 # Py36 has issues with add-apt +PYTHON_VERSIONS="python2.7 python3.5 python3.6" curl -sSOL https://bootstrap.pypa.io/get-pip.py add-apt-repository -y ppa:deadsnakes/ppa -apt-get -y -qq update - for version in ${PYTHON_VERSIONS}; do export PYTHON_VERSION=${version} - apt-get -y -qq install ${PYTHON_VERSION} + apt-get -y -qq update && apt-get -y -qq install ${PYTHON_VERSION} ${PYTHON_VERSION} get-pip.py -q ${PYTHON_VERSION} -m pip --version @@ -34,12 +31,10 @@ for version in ${PYTHON_VERSIONS}; do # Build bazel build \ - -c opt \ --noshow_progress \ --noshow_loading_progress \ --verbose_failures \ --test_output=errors \ - --crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0:toolchain \ build_pip_pkg # Package Whl @@ -49,8 +44,5 @@ for version in ${PYTHON_VERSIONS}; do #bazel-bin/build_pip_pkg artifacts done -# Clean up -rm get-pip.py - # Verify Wheels ./tools/ci_build/builds/wheel_verify.sh \ No newline at end of file diff --git a/tools/ci_build/builds/release_macos.sh b/tools/ci_build/builds/release_macos.sh old mode 100755 new mode 100644 index cc6806ed3f..c8065a6204 --- a/tools/ci_build/builds/release_macos.sh +++ b/tools/ci_build/builds/release_macos.sh @@ -15,7 +15,7 @@ # ============================================================================== set -e -x -PYTHON_VERSIONS="2.7.15 3.5.6 3.6.6 3.7.4" +PYTHON_VERSIONS="2.7.15 3.5.6 3.6.6" curl -sSOL https://bootstrap.pypa.io/get-pip.py # Install Bazel 0.24 @@ -24,10 +24,6 @@ chmod +x bazel-0.24.1-installer-darwin-x86_64.sh ./bazel-0.24.1-installer-darwin-x86_64.sh --user export PATH="$PATH:$HOME/bin" -# Install delocate -python3 -m pip install -q delocate - 
-brew update && brew upgrade pyenv eval "$(pyenv init -)" for version in ${PYTHON_VERSIONS}; do @@ -35,14 +31,13 @@ for version in ${PYTHON_VERSIONS}; do pyenv install -s $PYENV_VERSION python get-pip.py -q - python -m pip --version + python -m pip install -q delocate #Link TF dependency yes 'y' | sudo ./configure.sh --quiet # Build bazel build \ - -c opt \ --noshow_progress \ --noshow_loading_progress \ --verbose_failures \ @@ -56,8 +51,5 @@ for version in ${PYTHON_VERSIONS}; do #bazel-bin/build_pip_pkg artifacts done -# Clean up -rm get-pip.py - ## Verify Wheel ./tools/ci_build/builds/wheel_verify.sh \ No newline at end of file diff --git a/tools/ci_build/builds/wheel_verify.sh b/tools/ci_build/builds/wheel_verify.sh index e43042e281..a23388b32d 100755 --- a/tools/ci_build/builds/wheel_verify.sh +++ b/tools/ci_build/builds/wheel_verify.sh @@ -16,16 +16,12 @@ set -e -if [[ $(uname) == "Darwin" ]]; then - CMD="delocate-wheel -w wheelhouse" -else - pip3.6 install -U auditwheel==2.0.0 - tools/ci_build/builds/tf_auditwheel_patch.sh - CMD="auditwheel repair --plat manylinux2010_x86_64" -fi - ls artifacts/* for f in artifacts/*.whl; do - $CMD $f + if [[ $(uname) == "Darwin" ]]; then + delocate-wheel -w wheelhouse $f + else + auditwheel repair $f + fi done ls wheelhouse/* \ No newline at end of file diff --git a/tools/ci_build/ci_sanity.sh b/tools/ci_build/ci_sanity.sh index 642c31ae75..a4d5d3df84 100755 --- a/tools/ci_build/ci_sanity.sh +++ b/tools/ci_build/ci_sanity.sh @@ -227,13 +227,13 @@ do_bazel_nobuild() { } do_check_futures_test() { - cd "$ROOT_DIR/tools/ci_build/verify" - python check_futures.py + cd "$ROOT_DIR/tools/test" + python check_futures_test.py } do_check_file_name_test() { - cd "$ROOT_DIR/tools/ci_build/verify" - python check_file_name.py + cd "$ROOT_DIR/tools/test" + python file_name_test.py } do_check_code_format_test() { diff --git a/tools/ci_testing/addons_gpu.sh b/tools/ci_testing/addons_gpu.sh index 12b2cffdf4..ddaed75315 100755 --- a/tools/ci_testing/addons_gpu.sh +++ b/tools/ci_testing/addons_gpu.sh @@ -43,7 +43,6 @@ yes 'y' | ./configure.sh bazel test -c opt -k \ --jobs=${N_JOBS} --test_timeout 300,450,1200,3600 \ --test_output=errors --local_test_jobs=8 \ - --crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.0:toolchain \ //tensorflow_addons/... exit $? diff --git a/tools/ci_testing/install_py36.sh b/tools/ci_testing/install_py36.sh new file mode 100755 index 0000000000..77d4ba30ec --- /dev/null +++ b/tools/ci_testing/install_py36.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ============================================================================== +# Needed until docker image defaults to at least py35. 
+ +set -e -x + +curl -sSOL https://bootstrap.pypa.io/get-pip.py +add-apt-repository -y ppa:deadsnakes/ppa + +apt-get -y -qq update && apt-get -y -qq install python3.6 + +python3.6 get-pip.py -q +python3.6 -m pip --version +rm get-pip.py + +ln -sfn /usr/bin/python3.6 /usr/bin/python3 +pip3 install scipy # Pre-installed in custom-op \ No newline at end of file diff --git a/tools/ci_build/builds/tf_auditwheel_patch.sh b/tools/ci_testing/run_tests.sh similarity index 71% rename from tools/ci_build/builds/tf_auditwheel_patch.sh rename to tools/ci_testing/run_tests.sh index 52f4c19500..d0c3608cee 100755 --- a/tools/ci_build/builds/tf_auditwheel_patch.sh +++ b/tools/ci_testing/run_tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,10 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# # ============================================================================== -set -e +set -x + +## CPU Tests +/bin/bash ci_testing/addons_cpu.sh + +## GPU Tests +#/bin/bash ci_testing/addons_gpu.sh -TF_SHARED_LIBRARY_NAME=$(grep -r TF_SHARED_LIBRARY_NAME .bazelrc | awk -F= '{print$2}') -POLICY_JSON="/usr/local/lib/python3.6/dist-packages/auditwheel/policy/policy.json" -sed -i "s/libresolv.so.2\"/libresolv.so.2\", $TF_SHARED_LIBRARY_NAME/g" $POLICY_JSON +exit $? \ No newline at end of file diff --git a/tools/run_docker.sh b/tools/run_docker.sh index e03be614b5..4c91947250 100755 --- a/tools/run_docker.sh +++ b/tools/run_docker.sh @@ -62,10 +62,10 @@ fi DOCKER_OPTS='' case ${DEVICE} in cpu) - DOCKER_IMAGE=tensorflow/tensorflow:custom-op-ubuntu16 + DOCKER_IMAGE=tensorflow/tensorflow:custom-op ;; gpu) - DOCKER_IMAGE=tensorflow/tensorflow:custom-op-gpu-ubuntu16 + DOCKER_IMAGE=tensorflow/tensorflow:custom-op-gpu DOCKER_OPTS="--runtime=nvidia ${DOCKER_OPTS}" ;; *) @@ -75,8 +75,8 @@ case ${DEVICE} in esac case ${PYTHON} in - py2) ENVIRONMENT_CMD="ln -sf /usr/bin/python2 /usr/bin/python && python -m pip install -U pip";; - py3) ENVIRONMENT_CMD="ln -sf /usr/bin/python3.6 /usr/bin/python && python -m pip install -U pip";; + py2) ENVIRONMENT_CMD="ln -sf /usr/bin/python2 /usr/bin/python";; + py3) ENVIRONMENT_CMD="tools/ci_testing/install_py36.sh && ln -sf /usr/bin/python3.6 /usr/bin/python";; *) echo "Invalid or missing python $OPTARG" exit 1 diff --git a/tools/ci_build/verify/check_futures.py b/tools/test/check_futures_test.py similarity index 99% rename from tools/ci_build/verify/check_futures.py rename to tools/test/check_futures_test.py index c17af2dc94..05fd197a45 100644 --- a/tools/ci_build/verify/check_futures.py +++ b/tools/test/check_futures_test.py @@ -32,7 +32,7 @@ import six -BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')) FUTURES_PATTERN = re.compile(r'^from __future__ import (\w+)\s*$') FUTURES_PATTERN_2 = re.compile( r'^from __future__ import (\w+), (\w+), (\w+)\s*$') diff --git a/tools/ci_build/verify/check_file_name.py b/tools/test/file_name_test.py similarity index 95% rename from tools/ci_build/verify/check_file_name.py rename to tools/test/file_name_test.py index 2b290b0a2b..23867570a9 100644 --- a/tools/ci_build/verify/check_file_name.py +++ b/tools/test/file_name_test.py @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright 
2019 The TensorFlow Authors. All Rights Reserved. +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ import os -BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')) def main(): From c800f01f906540eb1988b879c6ed8f652ab7d840 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Thu, 12 Sep 2019 09:06:40 -0500 Subject: [PATCH 17/20] Add files via upload --- .../optimizers/conditional_gradient.py | 10 +++++----- .../optimizers/conditional_gradient_test.py | 15 --------------- 2 files changed, 5 insertions(+), 20 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 43600b6c07..c5bfb23a3d 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -55,9 +55,9 @@ def __init__(self, def get_config(self): config = { - 'learning_rate': self._learning_rate, - 'lamda': self._lamda, - 'use_locking': self._use_locking + 'learning_rate': self._serialize_hyperparameter('learning_rate'), + 'lamda': self._serialize_hyperparameter('lamda'), + 'use_locking': self._serialize_hyperparameter('use_locking') } base_config = super(ConditionalGradient, self).get_config() return dict(list(base_config.items()) + list(config.items())) @@ -67,12 +67,12 @@ def _create_slots(self, var_list): self.add_slot(v, "conditional_gradient") def _prepare(self, var_list): - learning_rate = self.learning_rate + learning_rate = self._get_hyper('learning_rate') if callable(learning_rate): learning_rate = learning_rate() self._learning_rate_tensor = tf.convert_to_tensor( learning_rate, name="learning_rate") - lamda = self.lamda + lamda = self._get_hyper('lamda') if callable(lamda): lamda = lamda() self._lamda_tensor = tf.convert_to_tensor(lamda, name="lamda") diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index d2c96ad161..833b2c9f69 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -134,21 +134,6 @@ def testVariablesAcrossGraphs(self): self.assertStartsWith(optimizer_variables[2].name, "ConditionalGradient/var1") self.assertEqual(3, len(optimizer_variables)) - ''' - with tf.Graph().as_default(): - var2 = tf.Variable( - [1.0, 2.0], dtype=tf.float32, name="var2") - var3 = tf.Variable( - [3.0, 4.0], dtype=tf.float32, name="var3") - loss = lambda: tf.math.reduce_sum(var2 + var3) - optimizer.minimize(loss, var_list=[var2, var3]) - optimizer_variables = optimizer.variables() - self.assertStartsWith(optimizer_variables[1].name, - "ConditionalGraident/var2") - self.assertStartsWith(optimizer_variables[2].name, - "ConditionalGraident/var3") - self.assertEqual(3, len(optimizer_variables)) - ''' # Based on issue #347 in the following link, # "https://github.com/tensorflow/addons/issues/347" From 7d2b6c6844f0f4ae7f7e9288eb49ff0c6c5ca51a Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Thu, 12 Sep 2019 15:58:06 -0500 Subject: [PATCH 18/20] Add files via upload --- .../optimizers/conditional_gradient.py | 26 ++--- .../optimizers/conditional_gradient_test.py | 96 +++++++++---------- 2 files changed, 
61 insertions(+), 61 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index c5bfb23a3d..75395769ff 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -29,13 +29,13 @@ class ConditionalGradient(tf.keras.optimizers.Optimizer): See https://arxiv.org/pdf/1803.06453.pdf ``` variable -= (1-learning_rate) - * (variable + lamda * gradient / frobenius_norm(gradient)) + * (variable + Lambda * gradient / frobenius_norm(gradient)) ``` """ def __init__(self, learning_rate, - lamda, + Lambda, use_locking=False, name="ConditionalGradient", **kwargs): @@ -43,20 +43,20 @@ def __init__(self, Args: learning_rate: A `Tensor` or a floating point value. The learning rate. - lamda: A `Tensor` or a floating point value. The constraint. + Lambda: A `Tensor` or a floating point value. The constraint. use_locking: If `True` use locks for update operations. name: Optional name prefix for the operations created when applying gradients. Defaults to "ConditionalGradient" """ super(ConditionalGradient, self).__init__(name=name, **kwargs) self._set_hyper("learning_rate", learning_rate) - self._set_hyper("lamda", lamda) + self._set_hyper("Lambda", Lambda) self._set_hyper("use_locking", use_locking) def get_config(self): config = { 'learning_rate': self._serialize_hyperparameter('learning_rate'), - 'lamda': self._serialize_hyperparameter('lamda'), + 'Lambda': self._serialize_hyperparameter('Lambda'), 'use_locking': self._serialize_hyperparameter('use_locking') } base_config = super(ConditionalGradient, self).get_config() @@ -72,10 +72,10 @@ def _prepare(self, var_list): learning_rate = learning_rate() self._learning_rate_tensor = tf.convert_to_tensor( learning_rate, name="learning_rate") - lamda = self._get_hyper('lamda') - if callable(lamda): - lamda = lamda() - self._lamda_tensor = tf.convert_to_tensor(lamda, name="lamda") + Lambda = self._get_hyper('Lambda') + if callable(Lambda): + Lambda = Lambda() + self._Lambda_tensor = tf.convert_to_tensor(Lambda, name="Lambda") return super(ConditionalGradient, self)._prepare(var_list) def _resource_apply_dense(self, grad, var): @@ -85,9 +85,9 @@ def frobenius_norm(m): norm = tf.convert_to_tensor( frobenius_norm(grad), name="norm", dtype=var.dtype.base_dtype) lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) - lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) + Lambda = tf.dtypes.cast(self._Lambda_tensor, var.dtype.base_dtype) var_update_tensor = ( - tf.math.multiply(var, lr) - (1 - lr) * lamda * grad / norm) + tf.math.multiply(var, lr) - (1 - lr) * Lambda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'value': var_update_tensor, @@ -103,10 +103,10 @@ def frobenius_norm(m): norm = tf.convert_to_tensor( frobenius_norm(grad), name="norm", dtype=var.dtype.base_dtype) lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) - lamda = tf.dtypes.cast(self._lamda_tensor, var.dtype.base_dtype) + Lambda = tf.dtypes.cast(self._Lambda_tensor, var.dtype.base_dtype) var_slice = tf.gather(var, indices) var_update_value = ( - tf.math.multiply(var_slice, lr) - (1 - lr) * lamda * grad / norm) + tf.math.multiply(var_slice, lr) - (1 - lr) * Lambda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'indices': indices, diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 
833b2c9f69..15c3d9a7c1 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -27,8 +27,8 @@ @test_utils.run_all_in_graph_and_eager_modes class ConditionalGradientTest(tf.test.TestCase): - def _update_conditional_gradient_numpy(self, var, norm, g, lr, lamda): - var = var * lr - (1 - lr) * lamda * g / norm + def _update_conditional_gradient_numpy(self, var, norm, g, lr, Lambda): + var = var * lr - (1 - lr) * Lambda * g / norm return var def doTestBasic(self, use_resource=False, use_callable_params=False): @@ -44,12 +44,12 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = lambda: 0.5 - lamda = lambda: 0.01 + Lambda = lambda: 0.01 if not use_callable_params: learning_rate = learning_rate() - lamda = lamda() + Lambda = Lambda() cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, Lambda=Lambda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -177,9 +177,9 @@ def loss(): # pylint: enable=cell-var-from-loop learning_rate = 0.1 - lamda = 0.1 + Lambda = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, Lambda=Lambda) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -190,9 +190,9 @@ def loss(): norm0 = self.evaluate(norm0) self.assertAllCloseAccordingToType([[ 1.0 * learning_rate - - (1 - learning_rate) * lamda * grads0_0 / norm0, + (1 - learning_rate) * Lambda * grads0_0 / norm0, 2.0 * learning_rate - - (1 - learning_rate) * lamda * grads0_1 / norm0 + (1 - learning_rate) * Lambda * grads0_1 / norm0 ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -209,9 +209,9 @@ def loss(): norm0 = tf.math.reduce_sum(grads0**2)**0.5 learning_rate = 0.1 - lamda = 0.1 + Lambda = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, Lambda=Lambda) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -221,8 +221,8 @@ def loss(): self.assertAllCloseAccordingToType( [[1, 1], [ - learning_rate * 1 - (1 - learning_rate) * lamda * 1 / norm0, - learning_rate * 1 - (1 - learning_rate) * lamda * 1 / norm0 + learning_rate * 1 - (1 - learning_rate) * Lambda * 1 / norm0, + learning_rate * 1 - (1 - learning_rate) * Lambda * 1 / norm0 ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -236,7 +236,7 @@ def testTensorLearningRateAndConditionalGradient(self): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 cg_opt = cg_lib.ConditionalGradient( - learning_rate=tf.constant(0.5), lamda=tf.constant(0.01)) + learning_rate=tf.constant(0.5), Lambda=tf.constant(0.01)) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) if not tf.executing_eagerly(): @@ -300,7 +300,7 @@ def _dbParamsCG01(self): Return values been generated from the dist-belief conditional_gradient unittest, running with a learning rate of 0.1 - and a lamda of 0.1. + and a Lambda of 0.1. 
These values record how a parameter vector of size 10, initialized with 0.0, gets updated with 10 consecutive conditional_gradient @@ -405,7 +405,7 @@ def testLikeDistBeliefCG01(self): num_samples = len(db_grad) var0 = tf.Variable([0.0] * num_samples) grads0 = tf.constant([0.0] * num_samples) - cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, lamda=0.1) + cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, Lambda=0.1) if not tf.executing_eagerly(): cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -437,9 +437,9 @@ def testSparse(self): norm1 = tf.math.reduce_sum(tf.math.multiply(grads1, grads1))**0.5 learning_rate = 0.1 - lamda = 0.1 + Lambda = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, Lambda=Lambda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -472,22 +472,22 @@ def testSparse(self): norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( np.array([ - 0 - (1 - learning_rate) * lamda * 0 / norm0, - 0 - (1 - learning_rate) * lamda * 0 / norm0 + 0 - (1 - learning_rate) * Lambda * 0 / norm0, + 0 - (1 - learning_rate) * Lambda * 0 / norm0 ]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( np.array([ - 0 - (1 - learning_rate) * lamda * 0.1 / norm0, - 0 - (1 - learning_rate) * lamda * 0.1 / norm0 + 0 - (1 - learning_rate) * Lambda * 0.1 / norm0, + 0 - (1 - learning_rate) * Lambda * 0.1 / norm0 ]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([ 1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1, + (1 - learning_rate) * Lambda * 0.01 / norm1, 1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1 + (1 - learning_rate) * Lambda * 0.01 / norm1 ]), self.evaluate(var1)[2]) # Step 2: the conditional_gradient contain the @@ -499,22 +499,22 @@ def testSparse(self): # Check that the parameters have been updated. 
self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( - np.array([(0 - (1 - learning_rate) * lamda * 0.1 / norm0) * - learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0, - (0 - (1 - learning_rate) * lamda * 0.1 / norm0) * - learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0]), + np.array([(0 - (1 - learning_rate) * Lambda * 0.1 / norm0) + * learning_rate - + (1 - learning_rate) * Lambda * 0.1 / norm0, + (0 - (1 - learning_rate) * Lambda * 0.1 / norm0) + * learning_rate - + (1 - learning_rate) * Lambda * 0.1 / norm0]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1) * + (1 - learning_rate) * Lambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1, + (1 - learning_rate) * Lambda * 0.01 / norm1, (1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1) * + (1 - learning_rate) * Lambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1]), + (1 - learning_rate) * Lambda * 0.01 / norm1]), self.evaluate(var1)[2]) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -528,9 +528,9 @@ def testSharing(self): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = 0.1 - lamda = 0.1 + Lambda = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, lamda=lamda) + learning_rate=learning_rate, Lambda=Lambda) cg_update1 = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) cg_update2 = cg_opt.apply_gradients( @@ -566,16 +566,16 @@ def testSharing(self): self.assertAllCloseAccordingToType( np.array([ 1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0, + (1 - learning_rate) * Lambda * 0.1 / norm0, 2.0 * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0 + (1 - learning_rate) * Lambda * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ 3.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1, + (1 - learning_rate) * Lambda * 0.01 / norm1, 4.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1 + (1 - learning_rate) * Lambda * 0.01 / norm1 ]), self.evaluate(var1)) # Step 2: the second conditional_gradient contain @@ -585,23 +585,23 @@ def testSharing(self): # Check that the parameters have been updated. 
self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0) * + (1 - learning_rate) * Lambda * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0, + (1 - learning_rate) * Lambda * 0.1 / norm0, (2.0 * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0) * + (1 - learning_rate) * Lambda * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * lamda * 0.1 / norm0]), + (1 - learning_rate) * Lambda * 0.1 / norm0]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([(3.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1) * + (1 - learning_rate) * Lambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1, + (1 - learning_rate) * Lambda * 0.01 / norm1, (4.0 * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1) * + (1 - learning_rate) * Lambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * lamda * 0.01 / norm1]), + (1 - learning_rate) * Lambda * 0.01 / norm1]), self.evaluate(var1)) From 02d421aa99fb0a9c696b00ba39e791b884b74182 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Wed, 18 Sep 2019 11:41:23 -0500 Subject: [PATCH 19/20] Add files via upload --- .../optimizers/conditional_gradient.py | 78 +++++++++------- .../optimizers/conditional_gradient_test.py | 93 +++++++++---------- 2 files changed, 88 insertions(+), 83 deletions(-) diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 75395769ff..892626693e 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -24,39 +24,43 @@ @keras_utils.register_keras_custom_object class ConditionalGradient(tf.keras.optimizers.Optimizer): """Optimizer that implements the Conditional Gradient optimization. - Helps handle constraints well. + + This optimizer helps handle constraints well. + Currently only supports frobenius norm constraint. See https://arxiv.org/pdf/1803.06453.pdf + ``` variable -= (1-learning_rate) - * (variable + Lambda * gradient / frobenius_norm(gradient)) + * (variable + l_ambda * gradient / frobenius_norm(gradient)) ``` """ def __init__(self, learning_rate, - Lambda, + l_ambda, use_locking=False, - name="ConditionalGradient", + name='ConditionalGradient', **kwargs): """Construct a conditional gradient optimizer. - Args: + + Args: learning_rate: A `Tensor` or a floating point value. - The learning rate. - Lambda: A `Tensor` or a floating point value. The constraint. + The learning rate. + l_ambda: A `Tensor` or a floating point value. The constraint. use_locking: If `True` use locks for update operations. name: Optional name prefix for the operations created when - applying gradients. Defaults to "ConditionalGradient" + applying gradients. 
Defaults to 'ConditionalGradient' """ super(ConditionalGradient, self).__init__(name=name, **kwargs) - self._set_hyper("learning_rate", learning_rate) - self._set_hyper("Lambda", Lambda) - self._set_hyper("use_locking", use_locking) + self._set_hyper('learning_rate', kwargs.get('lr', learning_rate)) + self._set_hyper('l_ambda', l_ambda) + self._set_hyper('use_locking', use_locking) def get_config(self): config = { 'learning_rate': self._serialize_hyperparameter('learning_rate'), - 'Lambda': self._serialize_hyperparameter('Lambda'), + 'l_ambda': self._serialize_hyperparameter('l_ambda'), 'use_locking': self._serialize_hyperparameter('use_locking') } base_config = super(ConditionalGradient, self).get_config() @@ -64,30 +68,29 @@ def get_config(self): def _create_slots(self, var_list): for v in var_list: - self.add_slot(v, "conditional_gradient") - - def _prepare(self, var_list): - learning_rate = self._get_hyper('learning_rate') - if callable(learning_rate): - learning_rate = learning_rate() - self._learning_rate_tensor = tf.convert_to_tensor( - learning_rate, name="learning_rate") - Lambda = self._get_hyper('Lambda') - if callable(Lambda): - Lambda = Lambda() - self._Lambda_tensor = tf.convert_to_tensor(Lambda, name="Lambda") - return super(ConditionalGradient, self)._prepare(var_list) - - def _resource_apply_dense(self, grad, var): + self.add_slot(v, 'conditional_gradient') + + def _prepare_local(self, var_device, var_dtype, apply_state): + super(ConditionalGradient, self)._prepare_local( + var_device, var_dtype, apply_state) + apply_state[(var_device, var_dtype)]['learning_rate'] = tf.identity( + self._get_hyper('learning_rate', var_dtype)) + apply_state[(var_device, var_dtype)]['l_ambda'] = tf.identity( + self._get_hyper('l_ambda', var_dtype)) + + def _resource_apply_dense(self, grad, var, apply_state=None): def frobenius_norm(m): return tf.math.reduce_sum(m**2)**0.5 + var_device, var_dtype = var.device, var.dtype.base_dtype + coefficients = ((apply_state or {}).get((var_device, var_dtype)) + or self._fallback_apply_state(var_device, var_dtype)) norm = tf.convert_to_tensor( - frobenius_norm(grad), name="norm", dtype=var.dtype.base_dtype) - lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) - Lambda = tf.dtypes.cast(self._Lambda_tensor, var.dtype.base_dtype) + frobenius_norm(grad), name='norm', dtype=var.dtype.base_dtype) + lr = coefficients['learning_rate'] + l_ambda = coefficients['l_ambda'] var_update_tensor = ( - tf.math.multiply(var, lr) - (1 - lr) * Lambda * grad / norm) + tf.math.multiply(var, lr) - (1 - lr) * l_ambda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'value': var_update_tensor, @@ -96,17 +99,20 @@ def frobenius_norm(m): var_update_op = tf.raw_ops.AssignVariableOp(**var_update_kwargs) return tf.group(var_update_op) - def _resource_apply_sparse(self, grad, var, indices): + def _resource_apply_sparse(self, grad, var, indices, apply_state=None): def frobenius_norm(m): return tf.reduce_sum(m**2)**0.5 + var_device, var_dtype = var.device, var.dtype.base_dtype + coefficients = ((apply_state or {}).get((var_device, var_dtype)) + or self._fallback_apply_state(var_device, var_dtype)) norm = tf.convert_to_tensor( - frobenius_norm(grad), name="norm", dtype=var.dtype.base_dtype) - lr = tf.dtypes.cast(self._learning_rate_tensor, var.dtype.base_dtype) - Lambda = tf.dtypes.cast(self._Lambda_tensor, var.dtype.base_dtype) + frobenius_norm(grad), name='norm', dtype=var.dtype.base_dtype) + lr = coefficients['learning_rate'] + l_ambda = 
coefficients['l_ambda'] var_slice = tf.gather(var, indices) var_update_value = ( - tf.math.multiply(var_slice, lr) - (1 - lr) * Lambda * grad / norm) + tf.math.multiply(var_slice, lr) - (1 - lr) * l_ambda * grad / norm) var_update_kwargs = { 'resource': var.handle, 'indices': indices, diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 15c3d9a7c1..98fe40a375 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -25,10 +25,9 @@ import conditional_gradient as cg_lib -@test_utils.run_all_in_graph_and_eager_modes class ConditionalGradientTest(tf.test.TestCase): - def _update_conditional_gradient_numpy(self, var, norm, g, lr, Lambda): - var = var * lr - (1 - lr) * Lambda * g / norm + def _update_conditional_gradient_numpy(self, var, norm, g, lr, l_ambda): + var = var * lr - (1 - lr) * l_ambda * g / norm return var def doTestBasic(self, use_resource=False, use_callable_params=False): @@ -44,12 +43,12 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = lambda: 0.5 - Lambda = lambda: 0.01 + l_ambda = lambda: 0.01 if not use_callable_params: learning_rate = learning_rate() - Lambda = Lambda() + l_ambda = l_ambda() cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, Lambda=Lambda) + learning_rate=learning_rate, l_ambda=l_ambda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -177,9 +176,9 @@ def loss(): # pylint: enable=cell-var-from-loop learning_rate = 0.1 - Lambda = 0.1 + l_ambda = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, Lambda=Lambda) + learning_rate=learning_rate, l_ambda=l_ambda) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -190,9 +189,9 @@ def loss(): norm0 = self.evaluate(norm0) self.assertAllCloseAccordingToType([[ 1.0 * learning_rate - - (1 - learning_rate) * Lambda * grads0_0 / norm0, + (1 - learning_rate) * l_ambda * grads0_0 / norm0, 2.0 * learning_rate - - (1 - learning_rate) * Lambda * grads0_1 / norm0 + (1 - learning_rate) * l_ambda * grads0_1 / norm0 ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -209,9 +208,9 @@ def loss(): norm0 = tf.math.reduce_sum(grads0**2)**0.5 learning_rate = 0.1 - Lambda = 0.1 + l_ambda = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, Lambda=Lambda) + learning_rate=learning_rate, l_ambda=l_ambda) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -221,8 +220,8 @@ def loss(): self.assertAllCloseAccordingToType( [[1, 1], [ - learning_rate * 1 - (1 - learning_rate) * Lambda * 1 / norm0, - learning_rate * 1 - (1 - learning_rate) * Lambda * 1 / norm0 + learning_rate * 1 - (1 - learning_rate) * l_ambda * 1 / norm0, + learning_rate * 1 - (1 - learning_rate) * l_ambda * 1 / norm0 ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -236,7 +235,7 @@ def testTensorLearningRateAndConditionalGradient(self): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 cg_opt = cg_lib.ConditionalGradient( - learning_rate=tf.constant(0.5), Lambda=tf.constant(0.01)) + learning_rate=tf.constant(0.5), l_ambda=tf.constant(0.01)) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], 
[var0, var1])) if not tf.executing_eagerly(): @@ -300,7 +299,7 @@ def _dbParamsCG01(self): Return values been generated from the dist-belief conditional_gradient unittest, running with a learning rate of 0.1 - and a Lambda of 0.1. + and a l_ambda of 0.1. These values record how a parameter vector of size 10, initialized with 0.0, gets updated with 10 consecutive conditional_gradient @@ -405,7 +404,7 @@ def testLikeDistBeliefCG01(self): num_samples = len(db_grad) var0 = tf.Variable([0.0] * num_samples) grads0 = tf.constant([0.0] * num_samples) - cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, Lambda=0.1) + cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, l_ambda=0.1) if not tf.executing_eagerly(): cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -437,9 +436,9 @@ def testSparse(self): norm1 = tf.math.reduce_sum(tf.math.multiply(grads1, grads1))**0.5 learning_rate = 0.1 - Lambda = 0.1 + l_ambda = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, Lambda=Lambda) + learning_rate=learning_rate, l_ambda=l_ambda) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -472,22 +471,22 @@ def testSparse(self): norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( np.array([ - 0 - (1 - learning_rate) * Lambda * 0 / norm0, - 0 - (1 - learning_rate) * Lambda * 0 / norm0 + 0 - (1 - learning_rate) * l_ambda * 0 / norm0, + 0 - (1 - learning_rate) * l_ambda * 0 / norm0 ]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( np.array([ - 0 - (1 - learning_rate) * Lambda * 0.1 / norm0, - 0 - (1 - learning_rate) * Lambda * 0.1 / norm0 + 0 - (1 - learning_rate) * l_ambda * 0.1 / norm0, + 0 - (1 - learning_rate) * l_ambda * 0.1 / norm0 ]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([ 1.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1, + (1 - learning_rate) * l_ambda * 0.01 / norm1, 1.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1 + (1 - learning_rate) * l_ambda * 0.01 / norm1 ]), self.evaluate(var1)[2]) # Step 2: the conditional_gradient contain the @@ -499,22 +498,22 @@ def testSparse(self): # Check that the parameters have been updated. 
self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( - np.array([(0 - (1 - learning_rate) * Lambda * 0.1 / norm0) + np.array([(0 - (1 - learning_rate) * l_ambda * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0, - (0 - (1 - learning_rate) * Lambda * 0.1 / norm0) + (1 - learning_rate) * l_ambda * 0.1 / norm0, + (0 - (1 - learning_rate) * l_ambda * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0]), + (1 - learning_rate) * l_ambda * 0.1 / norm0]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1) * + (1 - learning_rate) * l_ambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1, + (1 - learning_rate) * l_ambda * 0.01 / norm1, (1.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1) * + (1 - learning_rate) * l_ambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1]), + (1 - learning_rate) * l_ambda * 0.01 / norm1]), self.evaluate(var1)[2]) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -528,9 +527,9 @@ def testSharing(self): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = 0.1 - Lambda = 0.1 + l_ambda = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, Lambda=Lambda) + learning_rate=learning_rate, l_ambda=l_ambda) cg_update1 = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) cg_update2 = cg_opt.apply_gradients( @@ -566,16 +565,16 @@ def testSharing(self): self.assertAllCloseAccordingToType( np.array([ 1.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0, + (1 - learning_rate) * l_ambda * 0.1 / norm0, 2.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0 + (1 - learning_rate) * l_ambda * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ 3.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1, + (1 - learning_rate) * l_ambda * 0.01 / norm1, 4.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1 + (1 - learning_rate) * l_ambda * 0.01 / norm1 ]), self.evaluate(var1)) # Step 2: the second conditional_gradient contain @@ -585,23 +584,23 @@ def testSharing(self): # Check that the parameters have been updated. 
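             # cg_update1 and cg_update2 share one optimizer instance, so this
             # second step applies the update rule on top of the step-1 values.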
self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0) * + (1 - learning_rate) * l_ambda * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0, + (1 - learning_rate) * l_ambda * 0.1 / norm0, (2.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0) * + (1 - learning_rate) * l_ambda * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * Lambda * 0.1 / norm0]), + (1 - learning_rate) * l_ambda * 0.1 / norm0]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([(3.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1) * + (1 - learning_rate) * l_ambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1, + (1 - learning_rate) * l_ambda * 0.01 / norm1, (4.0 * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1) * + (1 - learning_rate) * l_ambda * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * Lambda * 0.01 / norm1]), + (1 - learning_rate) * l_ambda * 0.01 / norm1]), self.evaluate(var1)) From c0dd7373ec46c316e63058c41c945e9059f9e132 Mon Sep 17 00:00:00 2001 From: pkan2 <34614124+pkan2@users.noreply.github.com> Date: Fri, 20 Sep 2019 09:14:15 -0500 Subject: [PATCH 20/20] Add files via upload --- tensorflow_addons/optimizers/README.md | 2 +- .../optimizers/conditional_gradient.py | 25 ++--- .../optimizers/conditional_gradient_test.py | 92 +++++++++---------- 3 files changed, 60 insertions(+), 59 deletions(-) diff --git a/tensorflow_addons/optimizers/README.md b/tensorflow_addons/optimizers/README.md index 8f087fd7eb..ea326f6e92 100644 --- a/tensorflow_addons/optimizers/README.md +++ b/tensorflow_addons/optimizers/README.md @@ -3,7 +3,7 @@ ## Maintainers | Submodule | Maintainers | Contact Info | |:---------- |:------------- |:--------------| -| conditional_gradient | Pengyu Kan | pkan2@wisc.edu | +| conditional_gradient | Pengyu Kan, Vishnu Lokhande | pkan2@wisc.edu, lokhande@cs.wisc.edu | | lazy_adam | Saishruthi Swaminathan | saishruthi.tn@gmail.com | | moving_average | Dheeraj R. Reddy | dheeraj98reddy@gmail.com | | weight_decay_optimizers | Phil Jund | ijund.phil@googlemail.com | diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 892626693e..957e8f3f53 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -32,13 +32,15 @@ class ConditionalGradient(tf.keras.optimizers.Optimizer): ``` variable -= (1-learning_rate) - * (variable + l_ambda * gradient / frobenius_norm(gradient)) + * (variable + lambda_ * gradient / frobenius_norm(gradient)) ``` + + Note that we choose "lambda_" here to refer to the constraint "lambda" in the paper. """ def __init__(self, learning_rate, - l_ambda, + lambda_, use_locking=False, name='ConditionalGradient', **kwargs): @@ -47,20 +49,20 @@ def __init__(self, Args: learning_rate: A `Tensor` or a floating point value. The learning rate. - l_ambda: A `Tensor` or a floating point value. The constraint. + lambda_: A `Tensor` or a floating point value. The constraint. use_locking: If `True` use locks for update operations. name: Optional name prefix for the operations created when applying gradients. 
Defaults to 'ConditionalGradient' """ super(ConditionalGradient, self).__init__(name=name, **kwargs) self._set_hyper('learning_rate', kwargs.get('lr', learning_rate)) - self._set_hyper('l_ambda', l_ambda) + self._set_hyper('lambda_', lambda_) self._set_hyper('use_locking', use_locking) def get_config(self): config = { 'learning_rate': self._serialize_hyperparameter('learning_rate'), - 'l_ambda': self._serialize_hyperparameter('l_ambda'), + 'lambda_': self._serialize_hyperparameter('lambda_'), 'use_locking': self._serialize_hyperparameter('use_locking') } base_config = super(ConditionalGradient, self).get_config() @@ -75,8 +77,8 @@ def _prepare_local(self, var_device, var_dtype, apply_state): var_device, var_dtype, apply_state) apply_state[(var_device, var_dtype)]['learning_rate'] = tf.identity( self._get_hyper('learning_rate', var_dtype)) - apply_state[(var_device, var_dtype)]['l_ambda'] = tf.identity( - self._get_hyper('l_ambda', var_dtype)) + apply_state[(var_device, var_dtype)]['lambda_'] = tf.identity( + self._get_hyper('lambda_', var_dtype)) def _resource_apply_dense(self, grad, var, apply_state=None): def frobenius_norm(m): @@ -88,14 +90,13 @@ def frobenius_norm(m): norm = tf.convert_to_tensor( frobenius_norm(grad), name='norm', dtype=var.dtype.base_dtype) lr = coefficients['learning_rate'] - l_ambda = coefficients['l_ambda'] + lambda_ = coefficients['lambda_'] var_update_tensor = ( - tf.math.multiply(var, lr) - (1 - lr) * l_ambda * grad / norm) + tf.math.multiply(var, lr) - (1 - lr) * lambda_ * grad / norm) var_update_kwargs = { 'resource': var.handle, 'value': var_update_tensor, } - var_update_op = tf.raw_ops.AssignVariableOp(**var_update_kwargs) return tf.group(var_update_op) @@ -109,10 +110,10 @@ def frobenius_norm(m): norm = tf.convert_to_tensor( frobenius_norm(grad), name='norm', dtype=var.dtype.base_dtype) lr = coefficients['learning_rate'] - l_ambda = coefficients['l_ambda'] + lambda_ = coefficients['lambda_'] var_slice = tf.gather(var, indices) var_update_value = ( - tf.math.multiply(var_slice, lr) - (1 - lr) * l_ambda * grad / norm) + tf.math.multiply(var_slice, lr) - (1 - lr) * lambda_ * grad / norm) var_update_kwargs = { 'resource': var.handle, 'indices': indices, diff --git a/tensorflow_addons/optimizers/conditional_gradient_test.py b/tensorflow_addons/optimizers/conditional_gradient_test.py index 98fe40a375..c82bff3991 100644 --- a/tensorflow_addons/optimizers/conditional_gradient_test.py +++ b/tensorflow_addons/optimizers/conditional_gradient_test.py @@ -26,8 +26,8 @@ class ConditionalGradientTest(tf.test.TestCase): - def _update_conditional_gradient_numpy(self, var, norm, g, lr, l_ambda): - var = var * lr - (1 - lr) * l_ambda * g / norm + def _update_conditional_gradient_numpy(self, var, norm, g, lr, lambda_): + var = var * lr - (1 - lr) * lambda_ * g / norm return var def doTestBasic(self, use_resource=False, use_callable_params=False): @@ -43,12 +43,12 @@ def doTestBasic(self, use_resource=False, use_callable_params=False): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = lambda: 0.5 - l_ambda = lambda: 0.01 + lambda_ = lambda: 0.01 if not use_callable_params: learning_rate = learning_rate() - l_ambda = l_ambda() + lambda_ = lambda_() cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, l_ambda=l_ambda) + learning_rate=learning_rate, lambda_=lambda_) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -176,9 +176,9 @@ def loss(): # pylint: enable=cell-var-from-loop 
learning_rate = 0.1 - l_ambda = 0.1 + lambda_ = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, l_ambda=l_ambda) + learning_rate=learning_rate, lambda_=lambda_) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -189,9 +189,9 @@ def loss(): norm0 = self.evaluate(norm0) self.assertAllCloseAccordingToType([[ 1.0 * learning_rate - - (1 - learning_rate) * l_ambda * grads0_0 / norm0, + (1 - learning_rate) * lambda_ * grads0_0 / norm0, 2.0 * learning_rate - - (1 - learning_rate) * l_ambda * grads0_1 / norm0 + (1 - learning_rate) * lambda_ * grads0_1 / norm0 ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -208,9 +208,9 @@ def loss(): norm0 = tf.math.reduce_sum(grads0**2)**0.5 learning_rate = 0.1 - l_ambda = 0.1 + lambda_ = 0.1 opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, l_ambda=l_ambda) + learning_rate=learning_rate, lambda_=lambda_) cg_op = opt.minimize(loss, var_list=[var0]) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -220,8 +220,8 @@ def loss(): self.assertAllCloseAccordingToType( [[1, 1], [ - learning_rate * 1 - (1 - learning_rate) * l_ambda * 1 / norm0, - learning_rate * 1 - (1 - learning_rate) * l_ambda * 1 / norm0 + learning_rate * 1 - (1 - learning_rate) * lambda_ * 1 / norm0, + learning_rate * 1 - (1 - learning_rate) * lambda_ * 1 / norm0 ]], self.evaluate(var0)) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -235,7 +235,7 @@ def testTensorLearningRateAndConditionalGradient(self): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 cg_opt = cg_lib.ConditionalGradient( - learning_rate=tf.constant(0.5), l_ambda=tf.constant(0.01)) + learning_rate=tf.constant(0.5), lambda_=tf.constant(0.01)) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) if not tf.executing_eagerly(): @@ -299,7 +299,7 @@ def _dbParamsCG01(self): Return values been generated from the dist-belief conditional_gradient unittest, running with a learning rate of 0.1 - and a l_ambda of 0.1. + and a lambda_ of 0.1. 
These values record how a parameter vector of size 10, initialized with 0.0, gets updated with 10 consecutive conditional_gradient @@ -404,7 +404,7 @@ def testLikeDistBeliefCG01(self): num_samples = len(db_grad) var0 = tf.Variable([0.0] * num_samples) grads0 = tf.constant([0.0] * num_samples) - cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, l_ambda=0.1) + cg_opt = cg_lib.ConditionalGradient(learning_rate=0.1, lambda_=0.1) if not tf.executing_eagerly(): cg_update = cg_opt.apply_gradients(zip([grads0], [var0])) self.evaluate(tf.compat.v1.global_variables_initializer()) @@ -436,9 +436,9 @@ def testSparse(self): norm1 = tf.math.reduce_sum(tf.math.multiply(grads1, grads1))**0.5 learning_rate = 0.1 - l_ambda = 0.1 + lambda_ = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, l_ambda=l_ambda) + learning_rate=learning_rate, lambda_=lambda_) cg_update = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) @@ -471,22 +471,22 @@ def testSparse(self): norm1 = self.evaluate(norm1) self.assertAllCloseAccordingToType( np.array([ - 0 - (1 - learning_rate) * l_ambda * 0 / norm0, - 0 - (1 - learning_rate) * l_ambda * 0 / norm0 + 0 - (1 - learning_rate) * lambda_ * 0 / norm0, + 0 - (1 - learning_rate) * lambda_ * 0 / norm0 ]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( np.array([ - 0 - (1 - learning_rate) * l_ambda * 0.1 / norm0, - 0 - (1 - learning_rate) * l_ambda * 0.1 / norm0 + 0 - (1 - learning_rate) * lambda_ * 0.1 / norm0, + 0 - (1 - learning_rate) * lambda_ * 0.1 / norm0 ]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([ 1.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1, + (1 - learning_rate) * lambda_ * 0.01 / norm1, 1.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1 + (1 - learning_rate) * lambda_ * 0.01 / norm1 ]), self.evaluate(var1)[2]) # Step 2: the conditional_gradient contain the @@ -498,22 +498,22 @@ def testSparse(self): # Check that the parameters have been updated. 
self.assertAllClose(np.array([0, 0]), self.evaluate(var0)[0]) self.assertAllCloseAccordingToType( - np.array([(0 - (1 - learning_rate) * l_ambda * 0.1 / norm0) + np.array([(0 - (1 - learning_rate) * lambda_ * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0, - (0 - (1 - learning_rate) * l_ambda * 0.1 / norm0) + (1 - learning_rate) * lambda_ * 0.1 / norm0, + (0 - (1 - learning_rate) * lambda_ * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0]), + (1 - learning_rate) * lambda_ * 0.1 / norm0]), self.evaluate(var0)[1]) self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1) * + (1 - learning_rate) * lambda_ * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1, + (1 - learning_rate) * lambda_ * 0.01 / norm1, (1.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1) * + (1 - learning_rate) * lambda_ * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1]), + (1 - learning_rate) * lambda_ * 0.01 / norm1]), self.evaluate(var1)[2]) @test_utils.run_in_graph_and_eager_modes(reset_test=True) @@ -527,9 +527,9 @@ def testSharing(self): norm0 = tf.math.reduce_sum(grads0**2)**0.5 norm1 = tf.math.reduce_sum(grads1**2)**0.5 learning_rate = 0.1 - l_ambda = 0.1 + lambda_ = 0.1 cg_opt = cg_lib.ConditionalGradient( - learning_rate=learning_rate, l_ambda=l_ambda) + learning_rate=learning_rate, lambda_=lambda_) cg_update1 = cg_opt.apply_gradients( zip([grads0, grads1], [var0, var1])) cg_update2 = cg_opt.apply_gradients( @@ -565,16 +565,16 @@ def testSharing(self): self.assertAllCloseAccordingToType( np.array([ 1.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0, + (1 - learning_rate) * lambda_ * 0.1 / norm0, 2.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0 + (1 - learning_rate) * lambda_ * 0.1 / norm0 ]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([ 3.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1, + (1 - learning_rate) * lambda_ * 0.01 / norm1, 4.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1 + (1 - learning_rate) * lambda_ * 0.01 / norm1 ]), self.evaluate(var1)) # Step 2: the second conditional_gradient contain @@ -584,23 +584,23 @@ def testSharing(self): # Check that the parameters have been updated. self.assertAllCloseAccordingToType( np.array([(1.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0) * + (1 - learning_rate) * lambda_ * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0, + (1 - learning_rate) * lambda_ * 0.1 / norm0, (2.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0) * + (1 - learning_rate) * lambda_ * 0.1 / norm0) * learning_rate - - (1 - learning_rate) * l_ambda * 0.1 / norm0]), + (1 - learning_rate) * lambda_ * 0.1 / norm0]), self.evaluate(var0)) self.assertAllCloseAccordingToType( np.array([(3.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1) * + (1 - learning_rate) * lambda_ * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1, + (1 - learning_rate) * lambda_ * 0.01 / norm1, (4.0 * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1) * + (1 - learning_rate) * lambda_ * 0.01 / norm1) * learning_rate - - (1 - learning_rate) * l_ambda * 0.01 / norm1]), + (1 - learning_rate) * lambda_ * 0.01 / norm1]), self.evaluate(var1))
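
For reference, here is a minimal end-to-end sketch of the optimizer as it stands after this final patch, i.e. with the constraint exposed as the `lambda_` keyword. It is not part of the patch series: the `tensorflow_addons.optimizers` import path and the toy values are assumptions (the tests above simply do `import conditional_gradient as cg_lib`), and the NumPy check only restates the update rule used by `_update_conditional_gradient_numpy`.

```python
import numpy as np
import tensorflow as tf

# Assumed import path once the addon is installed; inside this patch series the
# tests import the module directly as `import conditional_gradient as cg_lib`.
from tensorflow_addons.optimizers import conditional_gradient as cg_lib

lr, lambda_ = 0.1, 0.1
var0 = tf.Variable([1.0, 2.0])
grad0 = tf.constant([0.1, 0.1])

# One conditional-gradient step:
#   var <- var * lr - (1 - lr) * lambda_ * grad / frobenius_norm(grad)
opt = cg_lib.ConditionalGradient(learning_rate=lr, lambda_=lambda_)
opt.apply_gradients([(grad0, var0)])

# The same step written out with NumPy, mirroring the test helper
# _update_conditional_gradient_numpy above.
norm = np.sqrt(np.sum(grad0.numpy()**2))
expected = np.array([1.0, 2.0]) * lr - (1 - lr) * lambda_ * grad0.numpy() / norm
np.testing.assert_allclose(var0.numpy(), expected, rtol=1e-5)
```

Each step is a convex combination (with weights `lr` and `1 - lr`) of the current iterate and the point `-lambda_ * grad / frobenius_norm(grad)`, which lies on the Frobenius ball of radius `lambda_`; that is how the frobenius-norm constraint mentioned in the class docstring is enforced.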