DL: Improve performance of mini-batch preprocessor (#467)

JIRA MADLIB-1334

This commit adds the following optimizations to minibatch preprocessor:

- Skip normalization if normalizing constant is 1.0

- Split the batching query to generate buffer_id's (based on row_id)
without moving around any data. Previously, calling `ROW_NUMBER()
OVER()` to add row_id's to the table was causing the data to be gathered
on the master node and then numbering the rows, which for large datasets
would be taking most of the time.

- Separate out the JOIN (called for even distribution) as well as
converting to bytea from the batching query. This avoids any VMEM limit
issues.

- num_buffers gets rounded up to the nearest multiple of num_segments
for even distribution across buffers on segments.

- Add new C function `array_to_bytea()` to convert array to bytea, and
some tests for it.  This is much faster than the python version we were
using, speeding up the query significantly.

Additionally, this commit adds a new function `plpy_execute_debug()`
in the utilities module that prints EXPLAIN plans and execution time
for debugging a specific query.

Co-authored-by: Ekta Khanna <ekhanna@pivotal.io>

Co-authored-by: Ekta Khanna <ekhanna@pivotal.io>
diff --git a/methods/array_ops/src/pg_gp/array_ops.c b/methods/array_ops/src/pg_gp/array_ops.c
index 48880a6..a842a60 100644
--- a/methods/array_ops/src/pg_gp/array_ops.c
+++ b/methods/array_ops/src/pg_gp/array_ops.c
@@ -2107,3 +2107,33 @@
 
     return pgarray;
 }
+
+PG_FUNCTION_INFO_V1(array_to_bytea);
+Datum
+array_to_bytea(PG_FUNCTION_ARGS)
+{
+    ArrayType *a = PG_GETARG_ARRAYTYPE_P(0);
+    Oid element_type = ARR_ELEMTYPE(a);
+    TypeCacheEntry * TI;
+    int data_length, nitems, items_avail;
+
+    data_length = VARSIZE(a) - ARR_DATA_OFFSET(a);
+    nitems = ArrayGetNItems(ARR_NDIM(a), ARR_DIMS(a));
+    TI = lookup_type_cache(element_type, TYPECACHE_CMP_PROC_FINFO);
+    items_avail = (data_length / TI->typlen);
+
+    if (nitems > items_avail) {
+        elog(ERROR, "Unexpected end of array:  expected %d elements but received only %d",  nitems,  items_avail);
+    } else if (nitems < items_avail) {
+        elog(WARNING, "to_bytea(): Ignoring %d extra elements after end of %d-element array!", items_avail - nitems, nitems);
+        data_length = (nitems * TI->typlen);
+    }
+
+    bytea *ba = palloc(VARHDRSZ + data_length);
+
+    SET_VARSIZE(ba, VARHDRSZ + data_length);
+
+    memcpy(((char *)ba) + VARHDRSZ, ARR_DATA_PTR(a), data_length);
+
+    PG_RETURN_BYTEA_P(ba);
+}
diff --git a/methods/array_ops/src/pg_gp/array_ops.sql_in b/methods/array_ops/src/pg_gp/array_ops.sql_in
index e1aa368..c1ec853 100644
--- a/methods/array_ops/src/pg_gp/array_ops.sql_in
+++ b/methods/array_ops/src/pg_gp/array_ops.sql_in
@@ -733,3 +733,15 @@
         """.format(schema_madlib='MADLIB_SCHEMA')
 $$ LANGUAGE PLPYTHONU IMMUTABLE
 m4_ifdef(`__HAS_FUNCTION_PROPERTIES__', `CONTAINS SQL', `');
+
+m4_changequote(<!, !>)
+m4_ifelse(__PORT__ __DBMS_VERSION_MAJOR__, <!GREENPLUM 4!>,,
+<!
+CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.array_to_bytea(ANYARRAY)
+RETURNS BYTEA
+AS
+'MODULE_PATHNAME', 'array_to_bytea'
+LANGUAGE C IMMUTABLE
+!>)
+m4_changequote(`,')
+m4_ifdef(`__HAS_FUNCTION_PROPERTIES__', `NO SQL', `');
diff --git a/methods/array_ops/src/pg_gp/test/array_ops.sql_in b/methods/array_ops/src/pg_gp/test/array_ops.sql_in
index b05d0b7..511564f 100644
--- a/methods/array_ops/src/pg_gp/test/array_ops.sql_in
+++ b/methods/array_ops/src/pg_gp/test/array_ops.sql_in
@@ -7,6 +7,9 @@
 --    all objects created in the default schema will be cleaned-up outside.
 ---------------------------------------------------------------------------
 
+m4_include(`SQLCommon.m4')
+m4_changequote(`<!', `!>')
+
 ---------------------------------------------------------------------------
 -- Setup:
 ---------------------------------------------------------------------------
@@ -307,3 +310,71 @@
     unnest_2d_tbl05_groundtruth t2
     USING (id,unnest_row_id)
 ) t3;
+
+-- TESTING array_to_bytea() function - skip for gpdb 4.3
+m4_ifelse(__PORT__ __DBMS_VERSION_MAJOR__, <!GREENPLUM 4!>,,
+<!
+
+-- create input table ( n = 3 x 5 x 7 dim SMALLINT[], r =  2 x 3 x 5 dim REAL[] )
+DROP TABLE IF EXISTS array_input_tbl;
+CREATE TABLE array_input_tbl (id SMALLINT, n SMALLINT[], r REAL[]);
+INSERT INTO array_input_tbl SELECT generate_series(1, 10);
+SELECT id, count(*), array_agg(n) from (select id, unnest(n) as n from array_input_tbl) u group by id order by id;
+UPDATE array_input_tbl SET
+    n=array_fill(2*id, ARRAY[3, 5, 7]),
+    r=array_fill(id + 0.4, array[2, 3, 5]);
+
+-- create flattened input table
+DROP TABLE IF EXISTS flat_array_input_tbl;
+CREATE TABLE flat_array_input_tbl (id SMALLINT, n SMALLINT[], n_length SMALLINT, r REAL[], r_length SMALLINT);
+INSERT INTO flat_array_input_tbl
+    SELECT n.id, n, n_length, r, r_length
+    FROM
+    (
+        SELECT id, array_agg(n) AS n, 2*COUNT(*) AS n_length
+        FROM
+        (
+            SELECT id, unnest(n) AS n FROM array_input_tbl
+        ) n_values
+        GROUP BY id
+    ) n
+    JOIN
+    (
+        SELECT id, array_agg(r) AS r, 4*COUNT(*) AS r_length
+        FROM
+        (
+            SELECT id, unnest(r) AS r FROM array_input_tbl
+        ) r_values
+        GROUP BY id
+    ) r
+    USING (id);
+
+CREATE TABLE bytea_tbl AS SELECT id, array_to_bytea(n) AS n, array_to_bytea(r) AS r FROM array_input_tbl;
+
+    -- verify lengths of BYTEA output is correct for SMALLINT & REAL arrays
+    SELECT assert(
+        length(o.n) = i.n_length AND length(o.r) = i.r_length,
+        'array_to_bytea() returned incorrect lengths:\n' ||
+        '   Expected length(n) = ' || n_length::TEXT || ', got ' || length(o.n) ||
+        '   Expected ' || r_length::TEXT || ', got ' || length(o.r)
+    )
+    FROM flat_array_input_tbl i JOIN bytea_tbl o USING (id);
+
+    -- convert BYTEA back to flat arrays of SMALLINT's & REAL's
+
+    CREATE TABLE array_output_tbl AS
+    SELECT
+        id,
+        convert_bytea_to_smallint_array(n) AS n,
+        convert_bytea_to_real_array(r) AS r
+    FROM bytea_tbl;
+
+    -- verify that data in above table matches flattened input table exactly
+    SELECT assert(
+        i.n = o.n AND i.r = o.r,
+        'output of array_to_bytea() does not convert back to flattened input'
+    )
+    FROM flat_array_input_tbl i JOIN array_output_tbl o USING (id);
+!>)
+
+m4_changequote(,)
diff --git a/src/ports/postgres/modules/deep_learning/input_data_preprocessor.py_in b/src/ports/postgres/modules/deep_learning/input_data_preprocessor.py_in
index 757a5bc..351e6a5 100644
--- a/src/ports/postgres/modules/deep_learning/input_data_preprocessor.py_in
+++ b/src/ports/postgres/modules/deep_learning/input_data_preprocessor.py_in
@@ -29,6 +29,8 @@
 from internal.db_utils import quote_literal
 from internal.db_utils import get_product_of_dimensions
 from utilities.minibatch_preprocessing import MiniBatchBufferSizeCalculator
+from utilities.control import OptimizerControl
+from utilities.control import HashaggControl
 from utilities.utilities import _assert
 from utilities.utilities import add_postfix
 from utilities.utilities import is_platform_pg
@@ -46,6 +48,7 @@
 from utilities.validate_args import get_expr_type
 
 from madlib_keras_helper import *
+import time
 
 NUM_CLASSES_COLNAME = "num_classes"
 
@@ -59,9 +62,9 @@
         self.dependent_varname = dependent_varname
         self.independent_varname = independent_varname
         self.buffer_size = buffer_size
-        self.normalizing_const = normalizing_const if normalizing_const is not None else DEFAULT_NORMALIZING_CONST
+        self.normalizing_const = normalizing_const
         self.num_classes = num_classes
-        self.distribution_rules = distribution_rules if distribution_rules else DEFAULT_GPU_CONFIG
+        self.distribution_rules = distribution_rules if distribution_rules else 'all_segments'
         self.module_name = module_name
         self.output_summary_table = None
         self.dependent_vartype = None
@@ -73,7 +76,6 @@
         ## Validating input args prior to using them in _set_validate_vartypes()
         self._validate_args()
         self._set_validate_vartypes()
-        self.num_of_buffers = self._get_num_buffers()
         self.dependent_levels = None
         # The number of padded zeros to include in 1-hot vector
         self.padding_size = 0
@@ -199,9 +201,212 @@
             3) One-hot encodes the dependent variable.
             4) Minibatches the one-hot encoded dependent variable.
         """
+        # setup for 1-hot encoding
         self._set_one_hot_encoding_variables()
-        # Create a temp table that has independent var normalized.
-        norm_tbl = unique_string(desp='normalized')
+
+        # Generate random strings for TEMP tables
+        series_tbl = unique_string(desp='series')
+        dist_key_tbl = unique_string(desp='dist_key')
+        normalized_tbl = unique_string(desp='normalized_table')
+        batched_table = unique_string(desp='batched_table')
+
+        # Used later in locals() for formatting queries
+        x=MINIBATCH_OUTPUT_INDEPENDENT_COLNAME_DL
+        y=MINIBATCH_OUTPUT_DEPENDENT_COLNAME_DL
+        float32=FLOAT32_SQL_TYPE
+        dep_shape_col = add_postfix(y, "_shape")
+        ind_shape_col = add_postfix(x, "_shape")
+
+        ind_shape = self._get_independent_var_shape()
+        ind_shape = ','.join([str(i) for i in ind_shape])
+        dep_shape = self._get_dependent_var_shape()
+        dep_shape = ','.join([str(i) for i in dep_shape])
+
+        one_hot_dep_var_array_expr = self.get_one_hot_encoded_dep_var_expr()
+
+        # skip normalization step if normalizing_const = 1.0
+        if self.normalizing_const and (self.normalizing_const < 0.999999 or self.normalizing_const > 1.000001):
+            rescale_independent_var = """{self.schema_madlib}.array_scalar_mult(
+                                         {self.independent_varname}::{float32}[],
+                                         (1/{self.normalizing_const})::{float32})
+                                      """.format(**locals())
+        else:
+            self.normalizing_const = DEFAULT_NORMALIZING_CONST
+            rescale_independent_var = "{self.independent_varname}::{float32}[]".format(**locals())
+
+        # It's important that we shuffle all rows before batching for fit(), but
+        #  we can skip that for predict()
+        order_by_clause = " ORDER BY RANDOM()" if order_by_random else ""
+
+        # This query template will be used later in pg & gp specific code paths,
+        #  where {make_buffer_id} and {dist_by_buffer_id} are filled in
+        batching_query = """
+            CREATE TEMP TABLE {batched_table} AS SELECT
+                {{make_buffer_id}} buffer_id,
+                {self.schema_madlib}.agg_array_concat(
+                    ARRAY[x_norm::{float32}[]]) AS {x},
+                {self.schema_madlib}.agg_array_concat(
+                    ARRAY[y]) AS {y},
+                COUNT(*) AS count
+            FROM {normalized_tbl}
+            GROUP BY buffer_id
+            {{dist_by_buffer_id}}
+        """.format(**locals())
+
+        # This query template will be used later in pg & gp specific code paths,
+        #  where {dist_key_col_comma} and {dist_by_dist_key} will be filled in
+        bytea_query = """
+            CREATE TABLE {self.output_table} AS SELECT
+                {{dist_key_col_comma}}
+                {self.schema_madlib}.array_to_bytea({x}) AS {x},
+                {self.schema_madlib}.array_to_bytea({y}) AS {y},
+                ARRAY[count,{ind_shape}]::SMALLINT[] AS {ind_shape_col},
+                ARRAY[count,{dep_shape}]::SMALLINT[] AS {dep_shape_col},
+                buffer_id
+            FROM {batched_table}
+            {{dist_by_dist_key}}
+        """.format(**locals())
+
+        if is_platform_pg():
+            # used later for writing summary table
+            self.distribution_rules = '$__madlib__$all_segments$__madlib__$'
+
+            #
+            # For postgres, we just need 3 simple queries:
+            #   1-hot-encode/normalize + batching + bytea conversion
+            #
+
+            # see note in gpdb code branch (lower down) on
+            # 1-hot-encoding of dependent var
+            one_hot_sql = """
+                CREATE TEMP TABLE {normalized_tbl} AS SELECT
+                    (ROW_NUMBER() OVER({order_by_clause}) - 1)::INTEGER as row_id,
+                    {rescale_independent_var} AS x_norm,
+                    {one_hot_dep_var_array_expr} AS y
+                FROM {self.source_table}
+            """.format(**locals())
+
+            plpy.execute(one_hot_sql)
+
+            self.buffer_size = self._get_buffer_size(1)
+
+            # Used to format query templates with locals()
+            make_buffer_id = 'row_id / {0} AS '.format(self.buffer_size)
+            dist_by_dist_key = ''
+            dist_by_buffer_id = ''
+            dist_key_col_comma = ''
+
+            # Disable hashagg since large number of arrays being concatenated
+            # could result in excessive memory usage.
+            with HashaggControl(False):
+                # Batch rows with GROUP BY
+                plpy.execute(batching_query.format(**locals()))
+
+            plpy.execute("DROP TABLE {0}".format(normalized_tbl))
+
+            # Convert to BYTEA and output final (permanent table)
+            plpy.execute(bytea_query.format(**locals()))
+
+            plpy.execute("DROP TABLE {0}".format(batched_table))
+
+            self._create_output_summary_table()
+
+            return
+
+        # Done with postgres, rest is all for gpdb
+        #
+        # This gpdb code path is far more complex, and depends on
+        #   how the user wishes to distribute the data.  Even if
+        #   it's to be spread evenly across all segments, we still
+        #   need to do some extra work to ensure that happens.
+
+        if self.distribution_rules == 'all_segments':
+            all_segments = True
+            self.distribution_rules = '$__madlib__$all_segments$__madlib__$'
+            num_segments = get_seg_number()
+        else:
+            all_segments = False
+
+        if self.distribution_rules == 'gpu_segments':
+            gpu_info_table = unique_string(desp='gpu_info')
+            plpy.execute("""
+                SELECT {self.schema_madlib}.gpu_configuration('{gpu_info_table}')
+            """.format(**locals()))
+            gpu_query = """
+                SELECT array_agg(DISTINCT(hostname)) as gpu_config
+                FROM {gpu_info_table}
+            """.format(**locals())
+            gpu_query_result = plpy.execute(gpu_query)[0]['gpu_config']
+            if not gpu_query_result:
+               plpy.error("{self.module_name}: No GPUs configured on hosts.".format(self=self))
+
+            gpu_config_hostnames = "ARRAY{0}".format(gpu_query_result)
+            # find hosts with gpus
+            get_segment_query = """
+                SELECT array_agg(content) as segment_ids,
+                       array_agg(dbid) as dbid,
+                       count(*) as count
+                FROM gp_segment_configuration
+                WHERE content != -1 AND role = 'p'
+                AND hostname=ANY({gpu_config_hostnames})
+            """.format(**locals())
+            segment_ids_result = plpy.execute(get_segment_query)[0]
+            plpy.execute("DROP TABLE IF EXISTS {0}".format(gpu_info_table))
+
+            self.gpu_config = "ARRAY{0}".format(sorted(segment_ids_result['segment_ids']))
+            self.distribution_rules = "ARRAY{0}".format(sorted(segment_ids_result['dbid']))
+
+            num_segments = segment_ids_result['count']
+
+        elif not all_segments:  # Read from a table with dbids to distribute the data
+            self._validate_distribution_table()
+            gpu_query = """
+                SELECT array_agg(content) as gpu_config,
+                       array_agg(gp_segment_configuration.dbid) as dbid
+                FROM {self.distribution_rules} JOIN gp_segment_configuration
+                ON {self.distribution_rules}.dbid = gp_segment_configuration.dbid
+            """.format(**locals())
+            gpu_query_result = plpy.execute(gpu_query)[0]
+            self.gpu_config = "ARRAY{0}".format(sorted(gpu_query_result['gpu_config']))
+            num_segments = plpy.execute("SELECT count(*) as count FROM {self.distribution_rules}".format(**locals()))[0]['count']
+            self.distribution_rules = "ARRAY{0}".format(sorted(gpu_query_result['dbid']))
+
+        join_key = 't.buffer_id % {num_segments}'.format(**locals())
+
+        if not all_segments:
+            join_key = '({self.gpu_config})[{join_key} + 1]'.format(**locals())
+
+        # Create large temp table such that there is atleast 1 row on each segment
+        # Using 999999 would distribute data(atleast 1 row on each segment) for
+        # a cluster as large as 20000
+        dist_key_col = DISTRIBUTION_KEY_COLNAME
+        query = """
+            CREATE TEMP TABLE {series_tbl} AS
+                SELECT generate_series(0, 999999) {dist_key_col}
+                DISTRIBUTED BY ({dist_key_col})
+            """.format(**locals())
+
+        plpy.execute(query)
+
+        # Used in locals() to format queries, including template queries
+        #  bytea_query & batching_query defined in section common to
+        #  pg & gp (very beginning of this function)
+        dist_by_dist_key = 'DISTRIBUTED BY ({dist_key_col})'.format(**locals())
+        dist_by_buffer_id = 'DISTRIBUTED BY (buffer_id)'
+        dist_key_col_comma = dist_key_col + ' ,'
+        make_buffer_id = ''
+
+        dist_key_query = """
+                CREATE TEMP TABLE {dist_key_tbl} AS
+                SELECT min({dist_key_col}) AS {dist_key_col}
+                FROM {series_tbl}
+                GROUP BY gp_segment_id
+                DISTRIBUTED BY ({dist_key_col})
+        """.format(**locals())
+
+        plpy.execute(dist_key_query)
+
+        plpy.execute("DROP TABLE {0}".format(series_tbl))
 
         # Always one-hot encode the dependent var. For now, we are assuming
         # that input_preprocessor_dl will be used only for deep
@@ -209,150 +414,169 @@
         # assumption that it is only for classification, so one-hot
         # encode the dep var, unless it's already a numeric array in
         # which case we assume it's already one-hot encoded.
-        one_hot_dep_var_array_expr = \
-            self.get_one_hot_encoded_dep_var_expr()
-        order_by_clause = " ORDER BY RANDOM() " if order_by_random else ""
-        scalar_mult_sql = """
+
+        # While 1-hot-encoding is done, we also normalize the independent
+        # var and randomly shuffle the rows on each segment.  (The dist key
+        # we're adding avoids any rows moving between segments.  This may
+        # make things slightly less random, but helps with speed--probably
+        # a safe tradeoff to make.)
+
+        norm_tbl = unique_string(desp='norm_table')
+
+        one_hot_sql = """
             CREATE TEMP TABLE {norm_tbl} AS
-            SELECT {self.schema_madlib}.array_scalar_mult(
-                {self.independent_varname}::{FLOAT32_SQL_TYPE}[],
-                (1/{self.normalizing_const})::{FLOAT32_SQL_TYPE}) AS x_norm,
-                {one_hot_dep_var_array_expr} AS y,
-                row_number() over() AS row_id
-            FROM {self.source_table} {order_by_clause}
-            """.format(FLOAT32_SQL_TYPE=FLOAT32_SQL_TYPE, **locals())
-        plpy.execute(scalar_mult_sql)
+            SELECT {dist_key_col},
+                {rescale_independent_var} AS x_norm,
+                {one_hot_dep_var_array_expr} AS y
+            FROM {self.source_table} s JOIN {dist_key_tbl} AS d
+                ON (s.gp_segment_id = d.gp_segment_id)
+            {order_by_clause}
+            DISTRIBUTED BY ({dist_key_col})
+        """.format(**locals())
+        plpy.execute(one_hot_sql)
 
-        series_tbl = unique_string(desp='series')
-        dist_key_tbl = unique_string(desp='dist_key')
-        dep_shape_col = add_postfix(
-            MINIBATCH_OUTPUT_DEPENDENT_COLNAME_DL, "_shape")
-        ind_shape_col = add_postfix(
-            MINIBATCH_OUTPUT_INDEPENDENT_COLNAME_DL, "_shape")
+        rows_per_seg_tbl = unique_string(desp='rows_per_seg')
+        start_rows_tbl = unique_string(desp='start_rows')
 
-        ind_shape = self._get_independent_var_shape()
-        ind_shape = ','.join([str(i) for i in ind_shape])
-        dep_shape = self._get_dependent_var_shape()
-        dep_shape = ','.join([str(i) for i in dep_shape])
-
-        if is_platform_pg():
-            self.distribution_rules = '$__madlib__$all_segments$__madlib__$'
-            distributed_by_clause = ''
-            dist_key_clause = ''
-            join_clause = ''
-            dist_key_comma = ''
-        else:
-            dist_key = DISTRIBUTION_KEY_COLNAME
-            # Create large temp table such that there is atleast 1 row on each segment
-            # Using 999999 would distribute data(atleast 1 row on each segment) for
-            # a cluster as large as 20000
-            query = """
-                    CREATE TEMP TABLE {series_tbl}
-                    AS
-                    SELECT generate_series(0, 999999) {dist_key}
-                    DISTRIBUTED BY ({dist_key})
-                """.format(**locals())
-            plpy.execute(query)
-            distributed_by_clause= ' DISTRIBUTED BY ({dist_key}) '.format(**locals())
-            dist_key_comma = dist_key + ' ,'
-            gpu_join_clause = """JOIN {dist_key_tbl} ON
-                ({self.gpu_config})[b.buffer_id%{num_segments}+1] = {dist_key_tbl}.id
-                """
-
-            if self.distribution_rules == 'gpu_segments':
-                gpu_info_table = unique_string(desp='gpu_info')
-                plpy.execute("""
-                    SELECT {self.schema_madlib}.gpu_configuration('{gpu_info_table}')
-                """.format(**locals()))
-                gpu_query = """
-                    SELECT array_agg(DISTINCT(hostname)) as gpu_config
-                    FROM {gpu_info_table}
-                """.format(**locals())
-                gpu_query_result = plpy.execute(gpu_query)[0]['gpu_config']
-                if not gpu_query_result:
-                   plpy.error("{self.module_name}: No GPUs configured on hosts.".format(self=self))
-
-                gpu_config_hostnames = "ARRAY{0}".format(gpu_query_result)
-                # find hosts with gpus
-                get_segment_query = """
-                    SELECT array_agg(content) as segment_ids,
-                           array_agg(dbid) as dbid,
-                           count(*) as count
-                    FROM gp_segment_configuration
-                    WHERE content != -1 AND role = 'p'
-                    AND hostname=ANY({gpu_config_hostnames})
-                """.format(**locals())
-                segment_ids_result = plpy.execute(get_segment_query)[0]
-                plpy.execute("DROP TABLE IF EXISTS {0}".format(gpu_info_table))
-
-                self.gpu_config = "ARRAY{0}".format(sorted(segment_ids_result['segment_ids']))
-                self.distribution_rules = "ARRAY{0}".format(sorted(segment_ids_result['dbid']))
-
-                num_segments = segment_ids_result['count']
-                where_clause = "WHERE gp_segment_id=ANY({self.gpu_config})".format(**locals())
-                join_clause = gpu_join_clause.format(**locals())
-
-            elif self.distribution_rules == DEFAULT_GPU_CONFIG:
-
-                self.distribution_rules = '$__madlib__$all_segments$__madlib__$'
-                where_clause = ''
-                num_segments = get_seg_number()
-                join_clause = 'JOIN {dist_key_tbl} ON (b.buffer_id%{num_segments})= {dist_key_tbl}.id'.format(**locals())
-
-            else:  # Read from a table with dbids to distribute the data
-
-                self._validate_distribution_table()
-                gpu_query = """
-                    SELECT array_agg(content) as gpu_config,
-                           array_agg(gp_segment_configuration.dbid) as dbid
-                    FROM {self.distribution_rules} JOIN gp_segment_configuration
-                    ON {self.distribution_rules}.dbid = gp_segment_configuration.dbid
-                """.format(**locals())
-                gpu_query_result = plpy.execute(gpu_query)[0]
-                self.gpu_config = "ARRAY{0}".format(sorted(gpu_query_result['gpu_config']))
-                where_clause = "WHERE gp_segment_id=ANY({self.gpu_config})".format(**locals())
-                num_segments = plpy.execute("SELECT count(*) as count FROM {self.distribution_rules}".format(**locals()))[0]['count']
-                join_clause = gpu_join_clause.format(**locals())
-                self.distribution_rules = "ARRAY{0}".format(sorted(gpu_query_result['dbid']))
-
-            dist_key_query = """
-                    CREATE TEMP TABLE {dist_key_tbl} AS
-                    SELECT gp_segment_id AS id, min({dist_key}) AS {dist_key}
-                    FROM {series_tbl}
-                    {where_clause}
-                    GROUP BY gp_segment_id
-            """
-            plpy.execute(dist_key_query.format(**locals()))
-
-        # Create the mini-batched output table
+        #  Generate rows_per_segment table; this small table will
+        #  just have one row on each segment containing the number
+        #  of rows on that segment in the norm_tbl
         sql = """
-            CREATE TABLE {self.output_table} AS
-            SELECT {dist_key_comma}
-                   {self.schema_madlib}.convert_array_to_bytea({x}) AS {x},
-                   {self.schema_madlib}.convert_array_to_bytea({y}) AS {y},
-                   ARRAY[count,{ind_shape}]::SMALLINT[] AS {ind_shape_col},
-                   ARRAY[count,{dep_shape}]::SMALLINT[] AS {dep_shape_col},
-                   buffer_id
-            FROM
-            (
-                SELECT
-                    {self.schema_madlib}.agg_array_concat(
-                        ARRAY[{norm_tbl}.x_norm::{FLOAT32_SQL_TYPE}[]]) AS {x},
-                    {self.schema_madlib}.agg_array_concat(
-                        ARRAY[{norm_tbl}.y]) AS {y},
-                    ({norm_tbl}.row_id%{self.num_of_buffers})::smallint AS buffer_id,
-                    count(*) AS count
-                FROM {norm_tbl}
-                GROUP BY buffer_id
-            ) b
-            {join_clause}
-            {distributed_by_clause}
-            """.format(x=MINIBATCH_OUTPUT_INDEPENDENT_COLNAME_DL,
-                       y=MINIBATCH_OUTPUT_DEPENDENT_COLNAME_DL,
-                       FLOAT32_SQL_TYPE=FLOAT32_SQL_TYPE,
-                       **locals())
+            CREATE TEMP TABLE {rows_per_seg_tbl} AS SELECT
+                COUNT(*) as rows_per_seg,
+                {dist_key_col}
+            FROM {norm_tbl}
+            GROUP BY {dist_key_col}
+            DISTRIBUTED BY ({dist_key_col})
+        """.format(**locals())
+
         plpy.execute(sql)
-        plpy.execute("DROP TABLE IF EXISTS {0}, {1}, {2}".format(norm_tbl, series_tbl, dist_key_tbl))
+
+        #  Generate start_rows_tbl from rows_per_segment table.
+        #  This assigns a start_row number for each segment based on
+        #  the sum of all rows in previous segments.  These will be
+        #  added to the row numbers within each segment to get an
+        #  absolute index into the table.  All of this is to accomplish
+        #  the equivalent of ROW_NUMBER() OVER() on the whole table,
+        #  but this way is much faster because we don't have to do an
+        #  N:1 Gather Motion (moving entire table to a single segment
+        #  and scanning through it).
+        #
+        sql = """
+            CREATE TEMP TABLE {start_rows_tbl} AS SELECT
+                {dist_key_col},
+                SUM(rows_per_seg) OVER (ORDER BY gp_segment_id) - rows_per_seg AS start_row
+            FROM {rows_per_seg_tbl}
+            DISTRIBUTED BY ({dist_key_col})
+        """.format(**locals())
+
+        plpy.execute(sql)
+
+        plpy.execute("DROP TABLE {0}".format(rows_per_seg_tbl))
+
+        self.buffer_size = self._get_buffer_size(num_segments)
+
+        # The query below assigns slot_id's to each row within
+        #  a segment, computes a row_id by adding start_row for
+        #  that segment to it, then divides by buffer_size to make
+        #  this into a buffer_id
+        # ie:
+        #  buffer_id = row_id / buffer_size
+        #     row_id = start_row + slot_id
+        #    slot_id = ROW_NUMBER() OVER(PARTITION BY <dist key>)::INTEGER
+        #
+        #   Instead of partitioning by gp_segment_id itself, we
+        # use __dist_key__ col instead.  This is the same partition,
+        # since there's a 1-to-1 mapping between the columns; but
+        # using __dist_key__ avoids an extra Redistribute Motion.
+        #
+        # Note: even though the ordering of these two columns is
+        #  different, this doesn't matter as each segment is being
+        #  numbered separately (only the start_row is different,
+        #  and those are fixed to the correct segments by the JOIN
+        #  condition.
+
+        sql = """
+        CREATE TEMP TABLE {normalized_tbl} AS SELECT
+            {dist_key_col},
+            x_norm,
+            y,
+            (ROW_NUMBER() OVER( PARTITION BY {dist_key_col} ))::INTEGER as slot_id,
+            ((start_row +
+               (ROW_NUMBER() OVER( PARTITION BY {dist_key_col} ) - 1)
+             )::INTEGER / {self.buffer_size}
+            ) AS buffer_id
+        FROM {norm_tbl} JOIN {start_rows_tbl}
+            USING ({dist_key_col})
+        ORDER BY buffer_id
+        DISTRIBUTED BY (slot_id)
+        """.format(**locals())
+
+        plpy.execute(sql)   # label buffer_id's
+
+        # A note on DISTRIBUTED BY (slot_id) in above query:
+        #
+        #     In the next query, we'll be doing the actual batching.  Due
+        #  to the GROUP BY, gpdb will Redistribute on buffer_id.  We could
+        #  avoid this by using DISTRIBUTED BY (buffer_id) in the above
+        #  (buffer-labelling) query.  But this also causes the GROUP BY
+        #  to use single-stage GroupAgg instead of multistage GroupAgg,
+        #  which for unknown reasons is *much* slower and often runs out
+        #  of VMEM unless it's set very high!
+
+        plpy.execute("DROP TABLE {norm_tbl}, {start_rows_tbl}".format(**locals()))
+
+        # Disable optimizer (ORCA) for platforms that use it
+        # since we want to use a groupagg instead of hashagg
+        with OptimizerControl(False), HashaggControl(False):
+            # Run actual batching query
+            plpy.execute(batching_query.format(**locals()))
+
+        plpy.execute("DROP TABLE {0}".format(normalized_tbl))
+
+        if not all_segments: # remove any segments we don't plan to use
+            plpy.execute("""
+                DELETE FROM {dist_key_tbl}
+                    WHERE NOT gp_segment_id = ANY({self.gpu_config})
+            """.format(**locals()))
+
+        plpy.execute("ANALYZE {dist_key_tbl}".format(**locals()))
+        plpy.execute("ANALYZE {batched_table}".format(**locals()))
+
+        # Redistribute from buffer_id to dist_key
+        #
+        #  This has to be separate from the batching query, because
+        #   we found that adding DISTRIBUTED BY (dist_key) to that
+        #   query causes it to run out of VMEM on large datasets such
+        #   as places100.  Possibly this is because the memory available
+        #   for GroupAgg has to be shared with an extra slice if they
+        #   are part of the same query.
+        #
+        #  We also tried adding this to the BYTEA conversion query, but
+        #   that resulted in slower performance than just keeping it
+        #   separate.
+        #
+        sql = """CREATE TEMP TABLE {batched_table}_dist_key AS
+                    SELECT {dist_key_col}, t.*
+                        FROM {batched_table} t
+                            JOIN {dist_key_tbl} d
+                                ON {join_key} = d.gp_segment_id
+                            DISTRIBUTED BY ({dist_key_col})
+              """.format(**locals())
+
+        # match buffer_id's with dist_keys
+        plpy.execute(sql)
+
+        sql = """DROP TABLE {batched_table}, {dist_key_tbl};
+                 ALTER TABLE {batched_table}_dist_key RENAME TO {batched_table}
+              """.format(**locals())
+        plpy.execute(sql)
+
+        # Convert batched table to BYTEA and output as final (permanent) table
+        plpy.execute(bytea_query.format(**locals()))
+
+        plpy.execute("DROP TABLE {0}".format(batched_table))
+
         # Create summary table
         self._create_output_summary_table()
 
@@ -405,7 +629,8 @@
             _assert(self.buffer_size > 0,
                     "{0}: The buffer size has to be a "
                     "positive integer or NULL.".format(self.module_name))
-        _assert(self.normalizing_const > 0,
-                "{0}: The normalizing constant has to be a "
-                "positive integer or NULL.".format(self.module_name))
+        if self.normalizing_const is not None:
+            _assert(self.normalizing_const > 0,
+                    "{0}: The normalizing constant has to be a "
+                    "positive integer or NULL.".format(self.module_name))
 
@@ -442,16 +667,17 @@
         return get_distinct_col_levels(table, dependent_varname,
             dependent_vartype, include_nulls=True)
 
-    def _get_num_buffers(self):
+    def _get_buffer_size(self, num_segments):
         num_rows_in_tbl = plpy.execute("""
                 SELECT count(*) AS cnt FROM {0}
             """.format(self.source_table))[0]['cnt']
         buffer_size_calculator = MiniBatchBufferSizeCalculator()
-        indepdent_var_dim = get_product_of_dimensions(self.source_table,
+        independent_var_dim = get_product_of_dimensions(self.source_table,
             self.independent_varname)
-        self.buffer_size = buffer_size_calculator.calculate_default_buffer_size(
-            self.buffer_size, num_rows_in_tbl, indepdent_var_dim)
-        return ceil((1.0 * num_rows_in_tbl) / self.buffer_size)
+        buffer_size = buffer_size_calculator.calculate_default_buffer_size(
+            self.buffer_size, num_rows_in_tbl, independent_var_dim, num_segments)
+        num_buffers = num_segments * ceil((1.0 * num_rows_in_tbl) / buffer_size / num_segments)
+        return int(ceil(num_rows_in_tbl / num_buffers))
 
 class ValidationDataPreprocessorDL(InputDataPreprocessorDL):
     def __init__(self, schema_madlib, source_table, output_table,
diff --git a/src/ports/postgres/modules/deep_learning/madlib_keras_helper.py_in b/src/ports/postgres/modules/deep_learning/madlib_keras_helper.py_in
index 40ae56e..6e006d5 100644
--- a/src/ports/postgres/modules/deep_learning/madlib_keras_helper.py_in
+++ b/src/ports/postgres/modules/deep_learning/madlib_keras_helper.py_in
@@ -51,7 +51,6 @@
 SMALLINT_SQL_TYPE = 'SMALLINT'
 
 DEFAULT_NORMALIZING_CONST = 1.0
-DEFAULT_GPU_CONFIG = 'all_segments'
 GP_SEGMENT_ID_COLNAME = "gp_segment_id"
 INTERNAL_GPU_CONFIG = '__internal_gpu_config__'
 
diff --git a/src/ports/postgres/modules/deep_learning/madlib_keras_validator.py_in b/src/ports/postgres/modules/deep_learning/madlib_keras_validator.py_in
index 37a2e25..11730cf 100644
--- a/src/ports/postgres/modules/deep_learning/madlib_keras_validator.py_in
+++ b/src/ports/postgres/modules/deep_learning/madlib_keras_validator.py_in
@@ -31,7 +31,6 @@
 from madlib_keras_helper import DISTRIBUTION_KEY_COLNAME
 from madlib_keras_helper import METRIC_TYPE_COLNAME
 from madlib_keras_helper import INTERNAL_GPU_CONFIG
-from madlib_keras_helper import DEFAULT_GPU_CONFIG
 from madlib_keras_helper import query_model_configs
 
 from utilities.minibatch_validation import validate_bytea_var_for_minibatch
@@ -234,7 +233,7 @@
         gpu_config = plpy.execute(
             "SELECT {0} FROM {1}".format(INTERNAL_GPU_CONFIG, summary_table)
             )[0][INTERNAL_GPU_CONFIG]
-        if gpu_config == DEFAULT_GPU_CONFIG:
+        if gpu_config == 'all_segments':
             _assert(0 not in accessible_gpus_for_seg,
                 "{0} error: Host(s) are missing gpus.".format(module_name))
         else:
diff --git a/src/ports/postgres/modules/deep_learning/test/input_data_preprocessor.sql_in b/src/ports/postgres/modules/deep_learning/test/input_data_preprocessor.sql_in
index d8c6798..7c6c5c3 100644
--- a/src/ports/postgres/modules/deep_learning/test/input_data_preprocessor.sql_in
+++ b/src/ports/postgres/modules/deep_learning/test/input_data_preprocessor.sql_in
@@ -19,6 +19,7 @@
  *
  *//* ----------------------------------------------------------------------- */
 m4_include(`SQLCommon.m4')
+m4_changequote(`<!', `!>')
 
 DROP TABLE IF EXISTS data_preprocessor_input;
 CREATE TABLE data_preprocessor_input(id serial, x double precision[], label TEXT);
@@ -49,20 +50,72 @@
   'x',
   5);
 
-SELECT assert(count(*)=4, 'Incorrect number of buffers in data_preprocessor_input_batch.')
+-- Divide two numbers and round up to the nearest integer
+CREATE FUNCTION divide_roundup(numerator NUMERIC, denominator NUMERIC)
+RETURNS INTEGER AS
+$$
+    SELECT (ceil($1 / $2)::INTEGER);
+$$ LANGUAGE SQL;
+
+-- num_buffers_calc() represents the num_buffers value that should be
+--  calculated by the preprocessor.
+-- For postgres, just need total rows / buffer_size rounded up.
+-- For greenplum, we take that result, and round up to the nearest multiple
+--   of num_segments.
+CREATE FUNCTION num_buffers_calc(rows_in_tbl INTEGER, buffer_size INTEGER)
+RETURNS INTEGER AS
+$$
+m4_ifdef(<!__POSTGRESQL__!>,
+    <! SELECT divide_roundup($1, $2); !>,
+    <! SELECT (COUNT(*)::INTEGER) * divide_roundup(divide_roundup($1, $2), COUNT(*)) FROM gp_segment_configuration
+                                                WHERE role = 'p' AND content != -1; !>
+)
+$$ LANGUAGE SQL;
+
+--  num_buffers() represents the actual number of buffers expected to
+--      be returned in the output table.
+--   For postgres, this should always be the same as num_buffers_calc()
+--      (as long as rows_in_tbl > 0, which should be validated elsewhere)
+--   For greenplum, this can be less than num_buffers_calc() in
+--     the special case where there is only one row per buffer.  In
+--      that case, the buffers in the output table will be equal to
+--      the number of rows in the input table.  This can only happen
+--      if rows_in_tbl < num_segments and is the only case where the
+--      number of buffers on each segment will not be exactly equal
+CREATE FUNCTION num_buffers(rows_in_tbl INTEGER, buffer_size INTEGER)
+RETURNS INTEGER AS
+$$
+    SELECT LEAST(num_buffers_calc($1, $2), $1);
+$$ LANGUAGE SQL;
+
+CREATE FUNCTION buffer_size(rows_in_tbl INTEGER, requested_buffer_size INTEGER)
+RETURNS INTEGER AS
+$$
+  SELECT divide_roundup($1, num_buffers($1, $2));
+$$ LANGUAGE SQL;
+
+SELECT assert(COUNT(*) = num_buffers(17, 5),
+    'Incorrect number of buffers in data_preprocessor_input_batch.')
 FROM data_preprocessor_input_batch;
 
-SELECT assert(independent_var_shape[2]=6, 'Incorrect buffer size.')
+SELECT assert(independent_var_shape[2]=6, 'Incorrect image shape ' || independent_var_shape[2])
 FROM data_preprocessor_input_batch WHERE buffer_id=0;
 
-SELECT assert(independent_var_shape[1]=5, 'Incorrect buffer size.')
-FROM data_preprocessor_input_batch WHERE buffer_id=1;
+SELECT assert(independent_var_shape[1]=buffer_size, 'Incorrect buffer size ' || independent_var_shape[1])
+FROM (SELECT buffer_size(17, 5) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
-SELECT assert(independent_var_shape[1]=4, 'Incorrect buffer size.')
-FROM data_preprocessor_input_batch WHERE buffer_id=3;
+SELECT assert(independent_var_shape[1]=buffer_size, 'Incorrect buffer size ' || independent_var_shape[1])
+FROM (SELECT buffer_size(17, 5) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=1;
 
-SELECT assert(octet_length(independent_var) = 96, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(independent_var_shape[1]=buffer_size, 'Incorrect buffer size ' || independent_var_shape[1])
+FROM (SELECT buffer_size(17, 5) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=2;
+
+SELECT assert(total_images = 17, 'Incorrect total number of images! Last buffer has incorrect size?')
+FROM (SELECT SUM(independent_var_shape[1]) AS total_images FROM data_preprocessor_input_batch) a;
+
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length ' || octet_length(independent_var)::TEXT)
+FROM (SELECT buffer_size(17, 5) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
+
 
 DROP TABLE IF EXISTS validation_out, validation_out_summary;
 SELECT validation_preprocessor_dl(
@@ -73,20 +126,21 @@
   'data_preprocessor_input_batch',
   5);
 
-SELECT assert(count(*)=4, 'Incorrect number of buffers in validation_out.')
+SELECT assert(COUNT(*) = num_buffers(17, 5),
+    'Incorrect number of buffers in validation_out.')
 FROM validation_out;
 
-SELECT assert(independent_var_shape[2]=6, 'Incorrect buffer size.')
+SELECT assert(independent_var_shape[2]=6, 'Incorrect image shape.')
 FROM data_preprocessor_input_batch WHERE buffer_id=0;
 
-SELECT assert(independent_var_shape[1]=5, 'Incorrect buffer size.')
-FROM data_preprocessor_input_batch WHERE buffer_id=1;
+SELECT assert(independent_var_shape[1]=buffer_size, 'Incorrect buffer size.')
+FROM (SELECT buffer_size(17, 5) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=1;
 
-SELECT assert(independent_var_shape[1]=4, 'Incorrect buffer size.')
-FROM data_preprocessor_input_batch WHERE buffer_id=3;
+SELECT assert(total_images = 17, 'Incorrect total number of images! Last buffer has incorrect size?')
+FROM (SELECT SUM(independent_var_shape[1]) AS total_images FROM data_preprocessor_input_batch) a;
 
-SELECT assert(octet_length(independent_var) = 96, 'Incorrect buffer size')
-FROM validation_out WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 5) buffer_size) a, validation_out WHERE buffer_id=0;
 
 DROP TABLE IF EXISTS data_preprocessor_input_batch, data_preprocessor_input_batch_summary;
 SELECT training_preprocessor_dl(
@@ -96,7 +150,6 @@
   'x');
 
 -- Test data is evenly distributed across all segments (GPDB only)
-m4_changequote(`<!', `!>')
 m4_ifdef(<!__POSTGRESQL__!>, <!!>, <!
 DROP TABLE IF EXISTS data_preprocessor_input_batch, data_preprocessor_input_batch_summary;
 SELECT training_preprocessor_dl(
@@ -109,11 +162,10 @@
 -- This test expects that total number of images(17 for input table data_preprocessor_input)
 -- are equally distributed across all segments.
--- Therefore, after preprocessing seg0 will have 17/(# of segs) buffers.
+-- Therefore, after preprocessing each segment should have roughly 17/(# of segs) buffers (within 1 of ceil(17/(# of segs))).
-SELECT assert(count(*)=(SELECT ceil(17.0/count(*)) from gp_segment_configuration WHERE role = 'p' and content != -1), 'Even distribution of buffers failed.')
-FROM data_preprocessor_input_batch
-WHERE gp_segment_id = 0;
+SELECT gp_segment_id, assert((SELECT divide_roundup(17, count(*)) from gp_segment_configuration WHERE role = 'p' and content != -1) - COUNT(*) <= 1, 'Even distribution of buffers failed. Seeing ' || count(*) || ' buffers.')
+    FROM data_preprocessor_input_batch GROUP BY 1;
 SELECT assert(__internal_gpu_config__ = 'all_segments', 'Missing column in summary table')
-FROM data_preprocessor_input_batch_summary;
+    FROM data_preprocessor_input_batch_summary;
 
 -- Test validation data is evenly distributed across all segments (GPDB only)
 DROP TABLE IF EXISTS validation_out, validation_out_summary;
@@ -124,9 +176,8 @@
   'x',
   'data_preprocessor_input_batch',
   1);
-SELECT assert(count(*)=(SELECT ceil(17.0/count(*)) from gp_segment_configuration WHERE role = 'p' and content != -1), 'Even distribution of validation buffers failed.')
-FROM validation_out
-WHERE gp_segment_id = 0;
+SELECT gp_segment_id, assert((SELECT divide_roundup(17, count(*)) from gp_segment_configuration WHERE role = 'p' and content != -1) - COUNT(*) <= 1, 'Even distribution of buffers failed. Seeing ' || count(*) || ' buffers.')
+    FROM validation_out GROUP BY 1;
 SELECT assert(__internal_gpu_config__ = 'all_segments', 'Missing column in validation summary table')
 FROM validation_out_summary;
 
@@ -208,8 +259,8 @@
 SELECT assert(dependent_var_shape[2] = 16, 'Incorrect one-hot encode dimension with num_classes') FROM
   data_preprocessor_input_batch WHERE buffer_id = 0;
 
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
 -- Test summary table
 SELECT assert
@@ -220,13 +271,14 @@
         independent_varname = 'x' AND
         dependent_vartype   = 'integer' AND
         class_values        = '{-6,-3,-1,0,2,3,4,5,6,7,8,9,10,12,NULL,NULL}' AND
-        buffer_size         = 4 AND  -- we sort the class values in python
+        summary.buffer_size = a.buffer_size AND  -- we sort the class values in python
         normalizing_const   = 5 AND
         pg_typeof(normalizing_const) = 'real'::regtype AND
         num_classes         = 16 AND
         distribution_rules  = 'all_segments',
         'Summary Validation failed. Actual:' || __to_char(summary)
-        ) from (select * from data_preprocessor_input_batch_summary) summary;
+        ) FROM (SELECT buffer_size(17, 4) buffer_size) a,
+          (SELECT * FROM data_preprocessor_input_batch_summary) summary;
 
 --- Test output data type
 SELECT assert(pg_typeof(independent_var) = 'bytea'::regtype, 'Wrong independent_var type') FROM data_preprocessor_input_batch WHERE buffer_id = 0;
@@ -286,8 +338,8 @@
 SELECT assert(dependent_var_shape[2] = 2, 'Incorrect one-hot encode dimension') FROM
    data_preprocessor_input_batch WHERE buffer_id = 0;
 
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
 SELECT assert(SUM(y) = 1, 'Incorrect one-hot encode format') FROM (SELECT buffer_id, UNNEST((convert_bytea_to_smallint_array(dependent_var))[1:2]) as y FROM data_preprocessor_input_batch) a WHERE buffer_id = 0;
 SELECT assert (dependent_vartype   = 'boolean' AND
@@ -328,8 +380,8 @@
 SELECT assert(dependent_var_shape[2] = 3, 'Incorrect one-hot encode dimension') FROM
    data_preprocessor_input_batch WHERE buffer_id = 0;
 
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
 SELECT assert(SUM(y) = 1, 'Incorrect one-hot encode format') FROM (SELECT buffer_id, UNNEST((convert_bytea_to_smallint_array(dependent_var))[1:3]) as y FROM data_preprocessor_input_batch) a WHERE buffer_id = 0;
 SELECT assert (dependent_vartype   = 'text' AND
@@ -363,8 +415,8 @@
 SELECT assert(pg_typeof(dependent_var) = 'bytea'::regtype, 'One-hot encode doesn''t convert into integer array format') FROM data_preprocessor_input_batch WHERE buffer_id = 0;
 SELECT assert(dependent_var_shape[2] = 3, 'Incorrect one-hot encode dimension') FROM
   data_preprocessor_input_batch WHERE buffer_id = 0;
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 SELECT assert(SUM(y) = 1, 'Incorrect one-hot encode format') FROM (SELECT buffer_id, UNNEST((convert_bytea_to_smallint_array(dependent_var))[1:3]) as y FROM data_preprocessor_input_batch) a WHERE buffer_id = 0;
 SELECT assert (dependent_vartype   = 'double precision' AND
                class_values        = '{4.0,4.2,5.0}' AND
@@ -385,8 +437,8 @@
 SELECT assert(dependent_var_shape[2] = 2, 'Incorrect one-hot encode dimension') FROM
   data_preprocessor_input_batch WHERE buffer_id = 0;
 
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
 SELECT assert(relative_error(SUM(y), SUM(y4)) < 0.000001, 'Incorrect one-hot encode value') FROM (SELECT UNNEST(convert_bytea_to_smallint_array(dependent_var)) AS y FROM data_preprocessor_input_batch) a, (SELECT UNNEST(y4) as y4 FROM data_preprocessor_input) b;
 SELECT assert (dependent_vartype   = 'double precision[]' AND
@@ -419,8 +471,8 @@
 SELECT assert(dependent_var_shape[2] = 2, 'Incorrect one-hot encode dimension') FROM
   data_preprocessor_input_batch WHERE buffer_id = 0;
 
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
 SELECT assert(relative_error(SUM(y), SUM(y5)) < 0.000001, 'Incorrect one-hot encode value') FROM (SELECT UNNEST(convert_bytea_to_smallint_array(dependent_var)) AS y FROM data_preprocessor_input_batch) a, (SELECT UNNEST(y5) as y5 FROM data_preprocessor_input) b;
 SELECT assert (dependent_vartype   = 'integer[]' AND
@@ -473,8 +525,8 @@
 SELECT assert(dependent_var_shape[2] = 5, 'Incorrect one-hot encode dimension') FROM
   data_preprocessor_input_batch WHERE buffer_id = 0;
 
-SELECT assert(octet_length(independent_var) = 72, 'Incorrect buffer size')
-FROM data_preprocessor_input_batch WHERE buffer_id=0;
+SELECT assert(octet_length(independent_var) = buffer_size*6*4, 'Incorrect buffer length')
+FROM (SELECT buffer_size(17, 4) buffer_size) a, data_preprocessor_input_batch WHERE buffer_id=0;
 
 -- The same tests, but for validation.
 DROP TABLE IF EXISTS data_preprocessor_input_validation_null;
@@ -541,7 +593,7 @@
 
 SELECT assert(dependent_var_shape[2] = 3, 'Incorrect one-hot encode dimension') FROM
   data_preprocessor_input_batch WHERE buffer_id = 0;
-SELECT assert(octet_length(independent_var) = 24, 'Incorrect buffer size')
+SELECT assert(octet_length(independent_var) = 24, 'Incorrect buffer length')
 FROM data_preprocessor_input_batch WHERE buffer_id=0;
 -- NULL is treated as a class label, so it should show '1' for the
 -- first index
@@ -570,7 +622,7 @@
 
 SELECT assert(dependent_var_shape[2] = 3, 'Incorrect one-hot encode dimension') FROM
   validation_out_batch WHERE buffer_id = 0;
-SELECT assert(octet_length(independent_var) = 24, 'Incorrect buffer size')
+SELECT assert(octet_length(independent_var) = 24, 'Incorrect buffer length')
 FROM data_preprocessor_input_batch WHERE buffer_id=0;
 -- NULL is treated as a class label, so it should show '1' for the
 -- first index
diff --git a/src/ports/postgres/modules/deep_learning/test/madlib_keras_cifar.setup.sql_in b/src/ports/postgres/modules/deep_learning/test/madlib_keras_cifar.setup.sql_in
index 1f3a24f..7c9ad5e 100644
--- a/src/ports/postgres/modules/deep_learning/test/madlib_keras_cifar.setup.sql_in
+++ b/src/ports/postgres/modules/deep_learning/test/madlib_keras_cifar.setup.sql_in
@@ -24,8 +24,8 @@
 DROP TABLE IF EXISTS cifar_10_sample;
 CREATE TABLE cifar_10_sample(id INTEGER, y SMALLINT, y_text TEXT, imgpath TEXT, x  REAL[]);
 COPY cifar_10_sample FROM STDIN DELIMITER '|';
-1|0|'cat'|'0/img0.jpg'|{{{202,204,199},{202,204,199},{204,206,201},{206,208,203},{208,210,205},{209,211,206},{210,212,207},{212,214,210},{213,215,212},{215,217,214},{216,218,215},{216,218,215},{215,217,214},{216,218,215},{216,218,215},{216,218,214},{217,219,214},{217,219,214},{218,220,215},{218,219,214},{216,217,212},{217,218,213},{218,219,214},{214,215,209},{213,214,207},{212,213,206},{211,212,205},{209,210,203},{208,209,202},{207,208,200},{205,206,199},{203,204,198}},{{206,208,203},{206,208,203},{207,209,204},{209,211,207},{211,213,209},{212,214,210},{213,215,211},{215,217,214},{216,218,215},{218,220,217},{220,222,219},{220,222,219},{220,222,219},{220,222,220},{221,223,219},{220,222,218},{220,222,217},{221,223,218},{221,223,218},{220,222,217},{218,219,214},{215,216,211},{220,221,216},{220,221,215},{217,218,212},{215,216,210},{214,215,209},{213,214,208},{212,213,207},{210,211,205},{208,209,203},{206,207,201}},{{210,212,207},{211,213,208},{212,214,210},{214,216,213},{215,217,215},{217,219,216},{218,220,217},{219,221,219},{221,223,220},{221,223,220},{222,224,221},{223,225,222},{224,226,223},{226,228,225},{226,228,225},{226,228,224},{226,228,223},{227,229,224},{227,228,223},{226,228,223},{225,226,221},{221,222,217},{221,222,217},{223,224,219},{221,222,218},{219,220,216},{219,220,216},{218,219,214},{216,216,212},{214,214,210},{212,213,208},{210,211,206}},{{212,214,209},{213,215,210},{215,217,213},{217,219,217},{218,220,219},{220,222,221},{222,224,223},{224,226,223},{225,227,224},{226,228,225},{230,232,229},{230,232,229},{228,230,227},{229,231,228},{230,232,229},{231,233,229},{232,234,229},{233,235,230},{233,235,230},{233,235,229},{234,235,230},{232,233,228},{229,230,225},{228,229,226},{228,228,226},{226,226,224},{224,224,222},{222,222,220},{220,220,218},{218,218,216},{216,216,213},{213,214,209}},{{215,217,212},{216,218,213},{218,220,215},{219,221,219},{221,223,222},{222,224,223},{224,226,225},{225,227,224},{227,229,225},{224,226,223},{209,211,209},{223,224,222},{232,2
34,231},{232,234,231},{234,236,233},{235,237,232},{237,239,234},{237,239,235},{238,240,235},{240,241,237},{240,241,236},{240,241,236},{240,241,236},{238,239,236},{237,237,235},{236,236,234},{234,234,232},{231,231,229},{228,229,227},{225,225,223},{221,221,219},{218,219,215}},{{218,220,217},{218,220,216},{220,222,218},{221,223,221},{223,225,222},{225,227,224},{226,228,225},{226,228,223},{232,235,228},{183,185,183},{79,80,84},{155,157,160},{236,238,235},{235,237,232},{236,238,236},{239,241,237},{241,243,240},{244,245,244},{244,246,245},{241,245,243},{242,245,242},{243,245,240},{245,246,241},{246,246,242},{245,246,241},{245,246,242},{244,245,241},{243,244,241},{240,241,239},{236,238,235},{232,234,231},{228,230,228}},{{221,223,220},{221,223,220},{222,224,221},{224,226,223},{226,228,224},{230,232,229},{232,234,231},{231,233,229},{235,238,232},{170,172,171},{34,34,42},{92,92,101},{223,224,224},{238,240,234},{238,240,236},{235,237,234},{216,218,216},{194,195,196},{200,202,205},{237,242,242},{244,247,245},{245,246,242},{247,248,242},{248,249,244},{249,250,245},{250,251,246},{250,251,246},{250,252,248},{249,251,248},{247,249,246},{246,248,245},{243,245,243}},{{222,224,221},{223,225,222},{225,227,224},{227,229,226},{230,232,229},{187,189,186},{172,174,171},{227,229,228},{243,245,241},{179,181,180},{38,38,46},{47,47,60},{191,192,197},{240,243,237},{242,245,238},{186,188,185},{82,84,83},{63,64,67},{72,73,77},{184,188,189},{246,250,249},{245,246,244},{247,248,244},{249,250,245},{249,250,245},{250,251,246},{251,253,248},{252,254,251},{252,254,251},{252,254,251},{252,254,252},{252,254,253}},{{222,224,221},{223,225,222},{226,228,225},{229,231,228},{233,235,232},{158,161,157},{72,74,72},{107,108,110},{190,192,191},{183,184,184},{44,45,52},{26,25,39},{135,135,141},{241,243,239},{246,248,243},{152,154,152},{37,38,39},{46,47,51},{47,48,53},{96,100,103},{223,226,228},{248,249,248},{247,247,245},{249,250,245},{249,250,245},{250,251,246},{250,251,246},{250,252,249},{250,252,250},{251,253,2
50},{251,253,250},{251,253,252}},{{222,225,221},{222,224,221},{225,228,224},{229,231,228},{232,234,231},{220,222,219},{163,165,163},{81,81,86},{70,71,74},{98,100,100},{41,41,48},{23,23,35},{76,77,83},{202,204,203},{221,223,223},{124,125,126},{42,43,46},{52,53,58},{51,52,59},{51,54,59},{158,161,164},{248,249,250},{248,248,248},{249,250,246},{249,250,245},{250,251,246},{250,251,246},{250,252,248},{250,252,249},{250,252,249},{250,252,249},{250,252,251}},{{223,225,223},{223,225,223},{227,228,226},{230,232,229},{231,233,231},{234,236,233},{236,238,236},{195,196,200},{103,104,107},{38,39,40},{25,26,30},{32,33,41},{39,41,45},{81,82,85},{93,95,99},{70,71,74},{46,47,51},{48,49,54},{48,50,56},{42,46,51},{81,84,87},{219,220,222},{251,251,251},{249,250,245},{249,250,245},{250,251,246},{250,251,246},{250,252,248},{250,252,249},{249,251,248},{249,251,248},{249,251,250}},{{224,224,225},{225,225,226},{228,228,229},{230,231,230},{232,234,233},{233,235,233},{234,236,235},{235,237,234},{212,214,211},{125,127,125},{24,26,26},{16,18,21},{28,30,34},{35,37,41},{48,50,54},{50,52,56},{56,59,62},{52,55,59},{43,46,50},{47,53,58},{47,53,57},{145,148,149},{246,247,246},{250,251,246},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,252,247},{250,251,247},{250,251,247},{250,251,249}},{{224,224,226},{225,225,227},{228,228,230},{230,231,231},{232,234,233},{234,235,235},{235,237,235},{235,237,232},{241,243,238},{148,150,147},{19,21,21},{12,13,17},{12,13,17},{19,20,24},{29,30,34},{28,29,33},{51,52,56},{53,54,58},{35,37,41},{40,46,52},{57,63,67},{70,73,76},{168,168,170},{247,248,244},{251,252,247},{251,252,247},{251,252,247},{251,252,247},{252,253,248},{251,252,247},{251,252,247},{251,251,249}},{{224,224,226},{225,225,227},{228,228,230},{229,231,231},{232,234,232},{233,235,234},{235,237,236},{238,240,236},{226,228,224},{86,88,88},{11,12,14},{17,17,21},{13,12,17},{11,10,15},{10,9,14},{13,13,17},{22,22,26},{22,21,26},{21,21,26},{44,48,54},{46,51,56},{30,33,38},{70,73,77},{214,214,214},{253,2
53,250},{250,251,247},{251,252,247},{251,252,247},{251,252,247},{252,253,248},{252,253,248},{251,251,249}},{{224,224,226},{224,224,226},{226,226,228},{228,229,229},{229,231,230},{229,231,230},{229,231,230},{233,235,233},{208,210,208},{62,64,65},{10,10,14},{11,10,15},{11,10,15},{11,10,15},{9,8,13},{9,8,13},{9,7,12},{10,8,13},{14,13,18},{29,31,36},{27,30,36},{28,32,39},{45,49,56},{168,169,172},{252,252,252},{250,250,247},{250,251,246},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,251,249}},{{224,224,226},{224,224,226},{225,225,226},{225,226,226},{226,228,227},{225,227,226},{227,229,228},{230,232,230},{200,201,202},{57,58,62},{10,11,16},{11,9,14},{11,9,14},{14,12,17},{13,10,15},{9,7,12},{8,6,11},{10,8,13},{9,7,12},{22,21,26},{27,29,35},{38,42,51},{46,52,62},{115,115,122},{236,235,237},{251,251,248},{250,251,245},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,251,249}},{{223,224,225},{223,224,225},{227,227,229},{230,231,232},{230,231,232},{222,224,224},{191,193,193},{156,157,158},{139,140,142},{44,45,48},{9,10,15},{12,9,15},{13,10,15},{13,10,15},{15,11,17},{16,12,18},{11,7,13},{11,7,13},{9,6,12},{15,14,19},{25,27,33},{41,46,55},{52,58,69},{49,52,61},{165,166,170},{252,254,252},{250,251,246},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,251,249}},{{220,222,222},{220,222,221},{182,183,187},{158,159,163},{158,159,162},{121,122,126},{77,78,81},{40,41,45},{32,33,37},{14,15,19},{8,8,12},{10,8,12},{11,9,12},{11,9,12},{12,9,13},{14,12,17},{13,10,16},{9,6,12},{7,4,10},{8,7,12},{18,19,24},{32,35,42},{40,45,54},{32,40,46},{66,73,76},{171,175,176},{244,246,244},{251,252,248},{252,253,247},{252,253,247},{252,253,248},{252,252,250}},{{219,221,220},{206,208,206},{96,97,101},{39,39,46},{47,48,55},{35,35,42},{34,34,41},{30,30,35},{22,23,27},{19,20,24},{7,8,12},{8,6,9},{10,8,11},{9,7,10},{9,7,10},{8,6,10},{8,6,11},{6,4,9},{7,5,10},{9,8,13},{10,10,15},{19,21,27},{35,39,47},{48,56,62},{39,47,50},{59,64,66},{197,200,201},{253,254,251},{251,252,247
},{252,253,248},{252,253,249},{253,253,252}},{{223,225,225},{186,188,186},{60,61,65},{25,25,33},{30,30,39},{35,35,44},{34,34,42},{31,32,37},{27,28,31},{25,26,30},{16,17,21},{7,6,9},{11,9,12},{11,9,12},{8,6,9},{7,5,9},{7,5,9},{6,4,8},{8,6,9},{12,10,13},{10,9,13},{10,10,16},{35,38,44},{47,54,59},{53,60,64},{46,51,55},{113,118,122},{241,242,241},{252,252,249},{251,251,249},{252,252,249},{252,252,251}},{{227,228,228},{198,200,198},{80,81,84},{39,39,45},{30,31,37},{23,24,30},{20,20,26},{23,24,28},{23,24,28},{24,25,29},{19,20,24},{10,8,12},{9,6,9},{11,9,12},{5,3,6},{6,4,6},{6,4,6},{6,4,6},{8,6,8},{13,9,11},{10,7,10},{11,10,15},{23,24,30},{36,39,44},{49,54,59},{52,59,65},{69,76,82},{212,213,214},{254,254,253},{250,250,249},{250,250,249},{251,251,250}},{{225,227,226},{223,225,223},{180,181,184},{150,152,154},{119,121,121},{78,80,80},{48,49,50},{26,27,31},{17,17,22},{13,13,18},{12,13,17},{14,13,16},{8,6,9},{6,4,7},{4,2,5},{5,3,4},{5,3,3},{4,2,3},{8,6,7},{14,10,10},{11,7,9},{12,10,14},{18,18,23},{24,25,30},{45,49,54},{60,68,76},{50,58,67},{163,165,167},{250,250,249},{249,249,249},{250,250,250},{250,250,250}},{{222,223,224},{222,224,223},{225,226,229},{226,228,228},{218,220,219},{199,201,200},{169,172,171},{133,135,137},{96,98,101},{62,64,67},{39,41,44},{29,29,31},{18,16,19},{8,6,9},{4,3,5},{4,2,4},{6,4,6},{7,5,6},{10,8,9},{13,9,10},{14,10,12},{16,14,17},{15,15,19},{20,20,25},{28,31,37},{44,49,57},{47,54,63},{96,100,105},{224,225,227},{251,251,252},{250,249,249},{250,249,250}},{{216,217,219},{211,212,214},{211,212,214},{212,214,215},{211,214,215},{208,210,211},{199,201,202},{185,187,189},{162,165,167},{128,130,132},{87,89,91},{63,63,65},{53,53,55},{43,43,45},{33,32,34},{24,24,26},{21,20,22},{18,17,19},{14,14,15},{9,8,10},{11,10,12},{13,13,15},{12,12,15},{25,25,31},{23,23,31},{20,21,28},{34,36,44},{65,71,81},{177,181,189},{251,251,254},{251,247,249},{250,249,250}},{{202,203,205},{183,184,186},{165,166,168},{147,148,150},{135,136,138},{138,139,141},{141,142,144},{140,141,143},{1
33,134,136},{118,119,121},{93,94,96},{73,73,75},{70,70,72},{69,69,71},{67,67,69},{62,62,64},{57,57,59},{63,63,65},{50,51,53},{42,42,44},{37,37,39},{24,24,26},{15,15,17},{17,18,23},{17,18,24},{12,13,19},{19,20,26},{37,42,52},{107,111,119},{229,228,232},{251,248,250},{248,247,249}},{{194,195,197},{168,169,171},{134,135,137},{94,94,96},{70,70,72},{68,68,70},{70,70,72},{71,71,73},{72,72,74},{72,72,74},{67,67,69},{59,59,61},{58,58,60},{60,60,62},{61,61,63},{62,62,64},{78,78,80},{92,92,94},{99,99,101},{102,102,104},{115,115,117},{121,121,123},{116,116,118},{104,104,109},{90,91,95},{75,76,81},{54,55,60},{62,64,71},{112,112,119},{212,211,215},{251,249,251},{247,246,248}},{{187,188,190},{160,161,163},{124,125,127},{84,83,86},{66,65,67},{62,61,64},{59,58,61},{58,57,59},{56,55,58},{54,54,56},{53,52,55},{52,52,54},{54,54,56},{56,56,58},{57,57,59},{60,60,62},{71,71,73},{77,77,79},{96,96,98},{105,105,107},{123,123,125},{150,150,152},{175,175,177},{192,193,196},{202,203,206},{202,203,206},{189,190,193},{194,194,198},{217,216,220},{239,237,241},{248,246,249},{247,247,249}},{{190,191,193},{160,162,163},{120,121,123},{81,79,82},{68,65,69},{65,62,65},{61,59,62},{58,56,59},{55,52,56},{52,50,53},{51,49,52},{49,49,51},{50,50,52},{51,52,54},{54,54,56},{54,54,56},{52,52,54},{58,58,60},{75,75,77},{92,92,94},{112,112,114},{136,136,138},{160,160,162},{183,183,185},{203,204,206},{224,225,227},{243,243,245},{251,248,250},{249,246,248},{246,244,247},{246,244,248},{246,245,248}},{{206,207,210},{184,185,188},{149,149,152},{104,102,105},{73,71,73},{64,62,64},{61,60,62},{60,57,60},{57,53,57},{55,51,55},{53,49,53},{51,49,52},{50,50,52},{50,49,52},{50,50,52},{48,48,50},{48,47,50},{53,52,54},{64,64,66},{84,83,85},{105,105,107},{129,129,131},{152,152,154},{174,175,177},{194,195,197},{213,214,216},{228,228,230},{239,237,239},{244,241,244},{244,243,246},{245,244,248},{245,244,247}},{{218,217,222},{210,209,214},{194,194,198},{161,162,162},{115,117,116},{79,80,79},{60,62,61},{57,58,59},{57,56,58},{57,52,56}
,{55,50,54},{52,49,53},{50,48,51},{49,48,51},{49,48,51},{50,49,52},{51,49,52},{54,52,55},{61,59,62},{78,77,79},{98,98,100},{120,120,122},{143,143,145},{166,166,168},{187,187,189},{208,208,210},{224,224,226},{236,235,239},{241,240,245},{243,242,247},{244,242,247},{243,243,247}},{{219,218,223},{217,216,221},{216,216,220},{206,207,208},{182,184,184},{144,146,145},{100,102,102},{70,71,72},{57,56,58},{54,51,54},{54,49,53},{51,49,52},{49,48,51},{49,47,50},{50,48,51},{52,50,53},{54,52,55},{55,53,56},{59,57,60},{72,72,74},{92,92,94},{111,111,113},{133,133,135},{156,156,158},{179,179,181},{200,200,202},{217,217,219},{230,230,234},{238,237,242},{241,240,245},{241,240,245},{241,240,245}},{{217,216,221},{216,215,220},{217,216,221},{216,217,219},{212,213,214},{198,199,201},{170,171,172},{131,133,135},{94,95,97},{69,68,70},{58,55,58},{53,51,54},{52,50,53},{52,50,53},{51,49,52},{52,50,53},{54,52,55},{55,53,56},{59,57,60},{69,68,70},{86,86,88},{104,104,106},{125,125,127},{147,147,149},{172,172,174},{193,193,195},{210,210,212},{224,223,227},{233,232,237},{239,238,243},{239,238,243},{240,238,243}}}
-2|1|'dog'|'0/img2.jpg'|{{{126,118,110},{122,115,108},{126,119,111},{127,119,109},{130,122,111},{130,122,111},{132,124,113},{133,125,114},{130,122,111},{132,124,113},{134,126,115},{131,123,112},{131,123,112},{134,126,115},{133,125,114},{136,128,117},{137,129,118},{137,129,118},{136,128,117},{131,123,112},{130,122,111},{132,124,113},{132,124,113},{132,124,113},{129,122,110},{127,121,109},{127,121,109},{125,119,107},{124,118,106},{124,118,106},{120,114,102},{117,111,99}},{{122,115,107},{119,112,104},{121,114,106},{124,116,107},{129,121,110},{130,122,111},{130,122,111},{130,122,111},{128,120,109},{131,123,112},{134,126,115},{132,124,113},{132,124,113},{134,126,115},{136,128,117},{139,131,120},{141,133,122},{138,130,119},{135,127,116},{130,122,111},{131,123,112},{132,124,113},{133,125,114},{133,125,114},{131,124,113},{129,123,111},{130,124,112},{129,123,111},{127,121,109},{124,118,106},{122,116,104},{117,111,99}},{{122,115,107},{121,114,106},{121,114,106},{125,118,108},{128,120,109},{129,121,110},{131,123,112},{130,122,111},{128,120,109},{131,123,112},{133,125,114},{134,126,115},{136,128,117},{137,129,118},{138,130,119},{139,131,120},{139,131,120},{139,131,120},{139,131,120},{139,131,120},{139,131,120},{137,129,118},{137,129,118},{138,130,119},{136,129,118},{135,129,117},{133,127,115},{130,124,112},{129,123,111},{126,120,108},{123,117,105},{121,115,103}},{{123,116,108},{123,116,108},{125,118,110},{127,119,109},{127,119,108},{129,121,110},{132,125,113},{131,123,112},{130,122,111},{134,127,115},{134,126,115},{135,127,116},{133,125,114},{134,126,116},{136,128,118},{140,132,121},{142,134,123},{141,133,122},{141,132,122},{142,133,123},{141,132,122},{141,133,122},{140,131,120},{137,128,117},{136,128,117},{136,130,118},{132,127,115},{128,122,110},{127,121,109},{127,121,109},{125,119,107},{123,117,105}},{{124,119,110},{125,119,111},{128,122,113},{129,123,113},{132,123,113},{136,123,115},{137,125,116},{134,125,116},{135,125,116},{136,125,116},{134,125,115},{133,126,116},{136,124
,115},{135,127,113},{137,131,115},{140,130,118},{141,130,120},{139,131,120},{137,132,118},{135,133,117},{134,130,117},{136,130,118},{136,130,119},{134,129,117},{132,127,115},{132,126,114},{131,125,113},{131,124,112},{129,122,110},{127,121,109},{128,122,110},{128,122,110}},{{124,121,112},{124,121,112},{128,124,115},{129,125,116},{136,124,118},{145,124,121},{143,126,121},{136,128,120},{136,126,119},{140,123,118},{136,126,118},{133,129,121},{146,130,130},{136,130,115},{135,133,112},{138,131,117},{138,130,119},{139,132,120},{137,132,117},{133,132,113},{132,131,115},{135,130,118},{136,129,118},{135,130,118},{131,127,115},{132,124,113},{132,124,113},{131,123,112},{129,122,110},{128,122,110},{127,121,109},{127,121,109}},{{123,120,111},{122,119,110},{124,121,112},{126,122,113},{132,122,115},{138,125,119},{135,128,120},{128,130,119},{130,129,119},{134,124,117},{127,118,110},{129,126,120},{142,135,139},{154,139,136},{141,130,123},{130,133,125},{131,135,121},{138,135,122},{143,130,122},{145,124,120},{142,126,119},{140,128,118},{141,128,119},{142,126,118},{139,125,116},{134,126,115},{134,126,115},{133,125,114},{131,123,112},{129,122,110},{126,120,108},{124,118,106}},{{121,118,109},{121,118,109},{122,119,110},{123,118,109},{127,121,113},{130,126,117},{131,131,121},{134,136,125},{130,131,121},{133,130,121},{133,127,119},{136,131,123},{115,104,98},{128,91,93},{151,118,117},{139,136,125},{135,137,122},{140,136,124},{139,132,124},{141,131,126},{140,128,121},{136,129,117},{134,131,118},{135,128,117},{137,126,116},{135,127,116},{135,127,116},{133,125,114},{131,123,112},{128,122,110},{127,121,109},{125,119,107}},{{124,121,112},{124,121,112},{126,123,114},{127,122,113},{128,124,115},{132,130,121},{138,131,123},{162,149,143},{156,144,138},{137,133,124},{141,136,128},{143,136,125},{137,120,105},{119,80,78},{144,108,108},{147,136,125},{142,133,123},{146,133,124},{149,134,123},{148,132,120},{151,130,121},{140,129,119},{127,134,118},{126,133,117},{131,128,116},{135,127,116},{134,126,115},{13
2,124,113},{130,122,111},{128,122,110},{128,122,110},{126,120,108}},{{126,123,114},{126,123,114},{128,125,116},{130,127,118},{129,129,119},{135,131,122},{143,122,119},{142,106,108},{163,131,132},{147,134,128},{140,136,127},{139,132,124},{141,134,128},{120,115,122},{134,131,142},{151,151,155},{143,139,133},{154,128,121},{152,90,85},{155,61,56},{158,77,80},{155,115,115},{141,131,121},{133,131,118},{135,128,117},{135,127,116},{134,126,115},{132,124,113},{130,123,112},{129,123,111},{126,120,108},{126,120,108}},{{126,121,112},{126,121,113},{130,125,116},{131,126,116},{131,128,117},{134,127,119},{141,123,120},{110,64,71},{121,57,65},{151,117,110},{143,135,122},{132,134,122},{136,138,130},{102,114,121},{80,97,121},{90,107,138},{130,146,164},{149,133,146},{142,71,84},{165,54,64},{151,34,43},{150,63,65},{159,116,110},{142,131,119},{134,130,118},{135,127,116},{136,128,117},{134,126,115},{131,123,112},{130,124,112},{126,120,108},{124,118,106}},{{129,122,114},{128,121,114},{131,124,116},{133,127,115},{135,129,117},{136,129,120},{138,132,127},{150,123,127},{144,96,100},{152,120,109},{144,129,114},{131,128,119},{171,174,177},{160,168,180},{132,146,167},{97,117,145},{106,131,159},{136,135,162},{150,112,134},{186,129,141},{202,129,128},{149,78,70},{159,118,107},{146,134,121},{135,132,119},{136,128,117},{136,128,117},{136,128,117},{134,126,115},{134,128,116},{131,125,113},{126,120,108}},{{133,126,118},{132,125,117},{134,127,119},{136,130,118},{136,130,118},{138,132,123},{139,131,126},{113,113,117},{126,135,146},{168,171,175},{149,142,143},{119,111,122},{148,157,188},{194,207,232},{166,178,191},{159,172,180},{180,194,207},{191,192,207},{170,156,167},{189,168,173},{211,196,191},{149,139,127},{147,135,123},{149,136,123},{145,134,122},{140,132,121},{138,130,119},{136,128,117},{132,125,113},{132,126,114},{131,125,113},{128,122,110}},{{134,127,119},{135,128,120},{136,128,121},{136,130,118},{137,131,119},{137,131,122},{142,134,128},{90,93,108},{33,52,92},{89,107,144},{145,161,193},{152,169
,196},{116,138,164},{133,151,175},{167,177,200},{189,194,216},{207,211,225},{210,216,225},{204,212,217},{203,212,214},{173,170,171},{143,132,131},{146,137,129},{145,137,122},{144,136,120},{141,133,122},{139,131,120},{138,130,119},{133,125,114},{130,124,112},{128,122,110},{126,120,108}},{{131,124,116},{134,128,120},{135,128,120},{136,130,118},{139,133,121},{138,131,122},{109,101,95},{121,117,130},{103,109,150},{52,67,114},{60,87,130},{108,140,172},{150,174,198},{146,165,191},{128,141,172},{140,148,181},{158,164,187},{169,181,196},{183,201,208},{183,195,199},{154,140,144},{156,133,132},{150,137,128},{142,138,121},{141,138,121},{141,133,122},{137,129,118},{136,128,117},{135,127,116},{133,127,115},{129,123,111},{128,122,110}},{{132,124,116},{130,121,114},{135,126,120},{140,131,122},{140,134,122},{138,135,124},{125,119,115},{157,159,162},{162,174,189},{140,149,179},{140,154,186},{131,146,177},{99,114,155},{137,152,188},{164,182,209},{164,182,203},{156,170,195},{146,162,184},{147,161,179},{168,173,191},{159,159,158},{147,144,128},{146,139,124},{146,139,123},{143,135,123},{142,134,124},{136,128,117},{133,125,114},{134,127,116},{127,121,109},{129,123,111},{129,123,111}},{{141,129,119},{136,122,116},{135,124,120},{141,129,128},{135,130,122},{148,152,145},{185,191,204},{141,169,193},{119,147,170},{181,181,202},{192,200,212},{204,203,220},{167,172,199},{132,143,172},{119,136,168},{137,160,193},{169,188,213},{185,192,204},{175,177,202},{92,107,179},{97,120,166},{134,143,141},{149,140,126},{147,137,121},{140,132,126},{140,131,128},{138,131,116},{137,129,115},{137,128,119},{133,125,115},{131,124,112},{129,122,111}},{{139,129,117},{132,130,117},{127,134,120},{131,137,124},{157,154,155},{180,178,199},{118,136,175},{89,115,149},{153,146,163},{192,179,185},{182,207,197},{193,217,214},{202,206,219},{193,200,218},{171,186,209},{129,149,182},{96,125,171},{137,161,179},{169,183,203},{53,62,153},{34,44,157},{102,112,172},{124,127,141},{145,138,133},{143,133,125},{140,133,117},{139,132,117
},{137,129,117},{136,128,116},{137,129,118},{135,127,116},{130,122,111}},{{134,128,114},{137,129,112},{143,130,112},{145,90,76},{165,123,127},{107,118,147},{94,115,158},{162,137,167},{152,76,85},{165,113,119},{207,202,202},{200,212,217},{213,213,226},{223,226,236},{220,229,236},{210,224,231},{150,175,200},{71,107,153},{104,138,195},{107,119,184},{50,51,159},{70,83,198},{60,77,162},{114,111,149},{140,126,118},{136,130,103},{133,125,112},{136,127,117},{134,126,110},{135,127,115},{134,126,115},{129,121,110}},{{137,127,114},{154,108,97},{159,66,64},{168,25,19},{151,29,30},{117,61,76},{168,148,162},{201,168,175},{180,136,140},{192,168,177},{222,207,225},{228,224,240},{219,216,222},{187,186,189},{189,190,189},{214,215,210},{233,229,229},{181,198,221},{77,114,170},{93,124,184},{133,149,200},{68,82,181},{33,46,185},{86,86,176},{134,125,146},{137,128,123},{136,128,117},{135,128,113},{134,126,112},{132,124,113},{131,123,112},{129,121,110}},{{139,122,111},{133,65,55},{151,14,14},{167,15,13},{174,17,17},{171,20,23},{155,53,47},{191,142,135},{216,219,216},{213,237,237},{214,218,224},{166,172,170},{95,92,90},{46,40,39},{51,45,44},{82,74,75},{125,112,115},{164,166,160},{147,167,173},{55,80,130},{118,138,172},{139,150,196},{64,67,163},{104,112,203},{113,111,187},{121,103,148},{140,131,120},{136,129,106},{133,124,120},{130,122,113},{129,121,110},{127,119,108}},{{140,121,112},{133,85,73},{137,45,37},{147,29,31},{162,20,19},{180,18,10},{173,19,14},{161,56,61},{181,157,159},{148,152,149},{90,79,74},{44,39,25},{55,47,39},{69,59,54},{57,45,43},{43,31,31},{33,28,24},{34,29,26},{54,48,52},{58,57,67},{73,79,100},{142,144,158},{137,137,155},{113,126,182},{84,89,183},{102,86,158},{130,123,125},{135,128,110},{135,123,122},{131,121,111},{128,120,107},{126,118,106}},{{135,124,117},{106,91,83},{65,46,39},{114,58,55},{154,47,45},{162,31,28},{145,27,26},{99,25,26},{61,33,30},{39,31,22},{52,47,36},{97,89,77},{135,124,111},{143,132,119},{137,125,113},{126,114,102},{110,99,92},{93,81,78},{74,60,59},{6
3,51,47},{80,71,69},{114,110,107},{132,133,133},{129,139,151},{141,151,187},{166,169,211},{173,172,189},{140,133,131},{134,123,112},{133,123,107},{129,120,104},{127,119,104}},{{129,120,112},{68,60,52},{19,12,5},{33,27,22},{73,42,41},{81,26,28},{50,12,14},{32,15,11},{51,37,28},{92,85,73},{133,125,114},{151,137,127},{147,135,121},{144,132,118},{147,135,121},{149,137,123},{150,138,125},{148,136,124},{145,133,120},{137,125,113},{115,105,92},{85,79,69},{101,101,95},{128,133,130},{159,169,173},{199,211,219},{225,231,232},{184,185,176},{133,129,114},{129,122,107},{127,119,105},{125,117,104}},{{128,120,109},{109,101,91},{76,67,58},{25,33,23},{14,17,12},{30,18,16},{45,42,38},{78,81,71},{120,115,100},{140,131,118},{144,128,117},{145,126,116},{144,132,118},{144,132,118},{145,133,119},{145,133,119},{142,131,114},{145,134,115},{147,136,118},{148,137,119},{146,137,118},{128,122,105},{97,93,79},{90,89,78},{116,120,113},{155,163,161},{200,206,205},{220,225,226},{178,181,181},{127,125,116},{122,118,107},{124,117,104}},{{125,117,104},{124,116,104},{129,122,109},{99,95,79},{72,63,51},{93,76,70},{126,111,100},{136,130,113},{136,133,114},{142,128,113},{135,116,103},{129,112,99},{130,118,104},{137,125,111},{142,130,116},{144,132,118},{142,131,113},{144,133,114},{145,134,115},{145,134,115},{144,134,117},{144,135,119},{142,134,118},{112,104,89},{79,72,64},{96,92,94},{147,146,154},{194,197,211},{222,228,241},{170,174,170},{119,119,110},{123,117,104}},{{124,117,101},{120,113,97},{125,118,103},{131,121,104},{130,120,109},{136,124,119},{143,126,117},{139,127,110},{139,132,113},{146,129,114},{142,125,109},{132,123,105},{134,123,109},{141,129,115},{142,130,116},{143,131,117},{141,129,114},{144,132,117},{145,133,118},{144,132,117},{140,128,116},{140,128,117},{144,131,119},{145,131,118},{131,115,104},{99,84,77},{86,77,74},{112,112,114},{166,172,176},{203,211,209},{153,156,149},{120,115,103}},{{124,117,103},{122,115,101},{130,122,109},{130,122,109},{127,122,112},{130,125,118},{134,125,116},{138,128
,114},{140,130,113},{142,127,112},{140,126,111},{138,129,113},{142,130,116},{139,127,113},{134,122,108},{142,130,116},{141,129,116},{142,130,117},{144,132,119},{143,131,118},{133,122,111},{136,126,116},{140,128,118},{139,127,116},{141,127,113},{140,126,110},{119,110,96},{92,88,75},{81,81,71},{120,120,114},{156,154,145},{123,117,105}},{{126,117,106},{128,120,109},{130,122,111},{129,121,110},{131,122,111},{132,124,113},{133,125,114},{136,126,114},{139,127,113},{138,126,112},{138,126,112},{140,128,114},{139,127,113},{137,125,111},{135,123,109},{142,130,116},{140,128,114},{140,128,114},{140,128,114},{140,128,114},{136,126,114},{136,128,117},{137,129,118},{137,129,118},{134,126,114},{134,127,111},{137,130,115},{136,128,113},{119,112,97},{97,89,78},{113,105,93},{121,113,102}},{{126,118,107},{126,118,107},{128,120,109},{130,122,111},{130,122,111},{131,123,112},{134,126,115},{136,126,114},{137,125,111},{137,125,111},{136,124,110},{139,127,113},{139,127,113},{139,127,113},{137,125,111},{138,126,112},{138,126,112},{138,126,112},{139,127,113},{139,127,113},{135,125,113},{138,130,119},{142,134,123},{141,133,122},{140,132,120},{138,130,117},{138,130,117},{135,127,114},{138,130,118},{133,125,114},{122,114,103},{114,106,95}},{{125,117,106},{126,118,107},{127,119,108},{129,121,110},{129,121,110},{129,121,110},{133,125,114},{133,123,111},{134,122,108},{135,123,109},{136,124,110},{138,126,112},{138,126,112},{136,124,110},{135,123,109},{136,124,110},{136,124,110},{137,125,111},{139,127,113},{138,126,112},{136,127,115},{138,130,119},{142,134,123},{140,132,121},{138,130,119},{134,126,115},{135,127,116},{135,127,116},{132,124,113},{128,120,109},{121,113,102},{114,106,95}},{{123,115,104},{123,115,104},{126,118,107},{128,120,109},{129,121,110},{129,121,110},{131,123,112},{132,122,110},{133,121,107},{135,123,109},{135,123,109},{135,123,109},{136,124,110},{133,121,107},{134,122,108},{136,124,110},{136,124,110},{137,125,111},{134,122,108},{130,118,104},{133,123,111},{133,125,115},{136,128,117
},{136,128,117},{135,127,117},{134,125,116},{132,123,114},{131,122,113},{129,121,111},{128,119,109},{126,118,107},{121,113,102}}}
+0|0|'cat'|'0/img0.jpg'|{{{202,204,199},{202,204,199},{204,206,201},{206,208,203},{208,210,205},{209,211,206},{210,212,207},{212,214,210},{213,215,212},{215,217,214},{216,218,215},{216,218,215},{215,217,214},{216,218,215},{216,218,215},{216,218,214},{217,219,214},{217,219,214},{218,220,215},{218,219,214},{216,217,212},{217,218,213},{218,219,214},{214,215,209},{213,214,207},{212,213,206},{211,212,205},{209,210,203},{208,209,202},{207,208,200},{205,206,199},{203,204,198}},{{206,208,203},{206,208,203},{207,209,204},{209,211,207},{211,213,209},{212,214,210},{213,215,211},{215,217,214},{216,218,215},{218,220,217},{220,222,219},{220,222,219},{220,222,219},{220,222,220},{221,223,219},{220,222,218},{220,222,217},{221,223,218},{221,223,218},{220,222,217},{218,219,214},{215,216,211},{220,221,216},{220,221,215},{217,218,212},{215,216,210},{214,215,209},{213,214,208},{212,213,207},{210,211,205},{208,209,203},{206,207,201}},{{210,212,207},{211,213,208},{212,214,210},{214,216,213},{215,217,215},{217,219,216},{218,220,217},{219,221,219},{221,223,220},{221,223,220},{222,224,221},{223,225,222},{224,226,223},{226,228,225},{226,228,225},{226,228,224},{226,228,223},{227,229,224},{227,228,223},{226,228,223},{225,226,221},{221,222,217},{221,222,217},{223,224,219},{221,222,218},{219,220,216},{219,220,216},{218,219,214},{216,216,212},{214,214,210},{212,213,208},{210,211,206}},{{212,214,209},{213,215,210},{215,217,213},{217,219,217},{218,220,219},{220,222,221},{222,224,223},{224,226,223},{225,227,224},{226,228,225},{230,232,229},{230,232,229},{228,230,227},{229,231,228},{230,232,229},{231,233,229},{232,234,229},{233,235,230},{233,235,230},{233,235,229},{234,235,230},{232,233,228},{229,230,225},{228,229,226},{228,228,226},{226,226,224},{224,224,222},{222,222,220},{220,220,218},{218,218,216},{216,216,213},{213,214,209}},{{215,217,212},{216,218,213},{218,220,215},{219,221,219},{221,223,222},{222,224,223},{224,226,225},{225,227,224},{227,229,225},{224,226,223},{209,211,209},{223,224,222},{232,2
34,231},{232,234,231},{234,236,233},{235,237,232},{237,239,234},{237,239,235},{238,240,235},{240,241,237},{240,241,236},{240,241,236},{240,241,236},{238,239,236},{237,237,235},{236,236,234},{234,234,232},{231,231,229},{228,229,227},{225,225,223},{221,221,219},{218,219,215}},{{218,220,217},{218,220,216},{220,222,218},{221,223,221},{223,225,222},{225,227,224},{226,228,225},{226,228,223},{232,235,228},{183,185,183},{79,80,84},{155,157,160},{236,238,235},{235,237,232},{236,238,236},{239,241,237},{241,243,240},{244,245,244},{244,246,245},{241,245,243},{242,245,242},{243,245,240},{245,246,241},{246,246,242},{245,246,241},{245,246,242},{244,245,241},{243,244,241},{240,241,239},{236,238,235},{232,234,231},{228,230,228}},{{221,223,220},{221,223,220},{222,224,221},{224,226,223},{226,228,224},{230,232,229},{232,234,231},{231,233,229},{235,238,232},{170,172,171},{34,34,42},{92,92,101},{223,224,224},{238,240,234},{238,240,236},{235,237,234},{216,218,216},{194,195,196},{200,202,205},{237,242,242},{244,247,245},{245,246,242},{247,248,242},{248,249,244},{249,250,245},{250,251,246},{250,251,246},{250,252,248},{249,251,248},{247,249,246},{246,248,245},{243,245,243}},{{222,224,221},{223,225,222},{225,227,224},{227,229,226},{230,232,229},{187,189,186},{172,174,171},{227,229,228},{243,245,241},{179,181,180},{38,38,46},{47,47,60},{191,192,197},{240,243,237},{242,245,238},{186,188,185},{82,84,83},{63,64,67},{72,73,77},{184,188,189},{246,250,249},{245,246,244},{247,248,244},{249,250,245},{249,250,245},{250,251,246},{251,253,248},{252,254,251},{252,254,251},{252,254,251},{252,254,252},{252,254,253}},{{222,224,221},{223,225,222},{226,228,225},{229,231,228},{233,235,232},{158,161,157},{72,74,72},{107,108,110},{190,192,191},{183,184,184},{44,45,52},{26,25,39},{135,135,141},{241,243,239},{246,248,243},{152,154,152},{37,38,39},{46,47,51},{47,48,53},{96,100,103},{223,226,228},{248,249,248},{247,247,245},{249,250,245},{249,250,245},{250,251,246},{250,251,246},{250,252,249},{250,252,250},{251,253,2
50},{251,253,250},{251,253,252}},{{222,225,221},{222,224,221},{225,228,224},{229,231,228},{232,234,231},{220,222,219},{163,165,163},{81,81,86},{70,71,74},{98,100,100},{41,41,48},{23,23,35},{76,77,83},{202,204,203},{221,223,223},{124,125,126},{42,43,46},{52,53,58},{51,52,59},{51,54,59},{158,161,164},{248,249,250},{248,248,248},{249,250,246},{249,250,245},{250,251,246},{250,251,246},{250,252,248},{250,252,249},{250,252,249},{250,252,249},{250,252,251}},{{223,225,223},{223,225,223},{227,228,226},{230,232,229},{231,233,231},{234,236,233},{236,238,236},{195,196,200},{103,104,107},{38,39,40},{25,26,30},{32,33,41},{39,41,45},{81,82,85},{93,95,99},{70,71,74},{46,47,51},{48,49,54},{48,50,56},{42,46,51},{81,84,87},{219,220,222},{251,251,251},{249,250,245},{249,250,245},{250,251,246},{250,251,246},{250,252,248},{250,252,249},{249,251,248},{249,251,248},{249,251,250}},{{224,224,225},{225,225,226},{228,228,229},{230,231,230},{232,234,233},{233,235,233},{234,236,235},{235,237,234},{212,214,211},{125,127,125},{24,26,26},{16,18,21},{28,30,34},{35,37,41},{48,50,54},{50,52,56},{56,59,62},{52,55,59},{43,46,50},{47,53,58},{47,53,57},{145,148,149},{246,247,246},{250,251,246},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,252,247},{250,251,247},{250,251,247},{250,251,249}},{{224,224,226},{225,225,227},{228,228,230},{230,231,231},{232,234,233},{234,235,235},{235,237,235},{235,237,232},{241,243,238},{148,150,147},{19,21,21},{12,13,17},{12,13,17},{19,20,24},{29,30,34},{28,29,33},{51,52,56},{53,54,58},{35,37,41},{40,46,52},{57,63,67},{70,73,76},{168,168,170},{247,248,244},{251,252,247},{251,252,247},{251,252,247},{251,252,247},{252,253,248},{251,252,247},{251,252,247},{251,251,249}},{{224,224,226},{225,225,227},{228,228,230},{229,231,231},{232,234,232},{233,235,234},{235,237,236},{238,240,236},{226,228,224},{86,88,88},{11,12,14},{17,17,21},{13,12,17},{11,10,15},{10,9,14},{13,13,17},{22,22,26},{22,21,26},{21,21,26},{44,48,54},{46,51,56},{30,33,38},{70,73,77},{214,214,214},{253,2
53,250},{250,251,247},{251,252,247},{251,252,247},{251,252,247},{252,253,248},{252,253,248},{251,251,249}},{{224,224,226},{224,224,226},{226,226,228},{228,229,229},{229,231,230},{229,231,230},{229,231,230},{233,235,233},{208,210,208},{62,64,65},{10,10,14},{11,10,15},{11,10,15},{11,10,15},{9,8,13},{9,8,13},{9,7,12},{10,8,13},{14,13,18},{29,31,36},{27,30,36},{28,32,39},{45,49,56},{168,169,172},{252,252,252},{250,250,247},{250,251,246},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,251,249}},{{224,224,226},{224,224,226},{225,225,226},{225,226,226},{226,228,227},{225,227,226},{227,229,228},{230,232,230},{200,201,202},{57,58,62},{10,11,16},{11,9,14},{11,9,14},{14,12,17},{13,10,15},{9,7,12},{8,6,11},{10,8,13},{9,7,12},{22,21,26},{27,29,35},{38,42,51},{46,52,62},{115,115,122},{236,235,237},{251,251,248},{250,251,245},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,251,249}},{{223,224,225},{223,224,225},{227,227,229},{230,231,232},{230,231,232},{222,224,224},{191,193,193},{156,157,158},{139,140,142},{44,45,48},{9,10,15},{12,9,15},{13,10,15},{13,10,15},{15,11,17},{16,12,18},{11,7,13},{11,7,13},{9,6,12},{15,14,19},{25,27,33},{41,46,55},{52,58,69},{49,52,61},{165,166,170},{252,254,252},{250,251,246},{251,252,246},{251,252,247},{251,252,247},{251,252,247},{251,251,249}},{{220,222,222},{220,222,221},{182,183,187},{158,159,163},{158,159,162},{121,122,126},{77,78,81},{40,41,45},{32,33,37},{14,15,19},{8,8,12},{10,8,12},{11,9,12},{11,9,12},{12,9,13},{14,12,17},{13,10,16},{9,6,12},{7,4,10},{8,7,12},{18,19,24},{32,35,42},{40,45,54},{32,40,46},{66,73,76},{171,175,176},{244,246,244},{251,252,248},{252,253,247},{252,253,247},{252,253,248},{252,252,250}},{{219,221,220},{206,208,206},{96,97,101},{39,39,46},{47,48,55},{35,35,42},{34,34,41},{30,30,35},{22,23,27},{19,20,24},{7,8,12},{8,6,9},{10,8,11},{9,7,10},{9,7,10},{8,6,10},{8,6,11},{6,4,9},{7,5,10},{9,8,13},{10,10,15},{19,21,27},{35,39,47},{48,56,62},{39,47,50},{59,64,66},{197,200,201},{253,254,251},{251,252,247
},{252,253,248},{252,253,249},{253,253,252}},{{223,225,225},{186,188,186},{60,61,65},{25,25,33},{30,30,39},{35,35,44},{34,34,42},{31,32,37},{27,28,31},{25,26,30},{16,17,21},{7,6,9},{11,9,12},{11,9,12},{8,6,9},{7,5,9},{7,5,9},{6,4,8},{8,6,9},{12,10,13},{10,9,13},{10,10,16},{35,38,44},{47,54,59},{53,60,64},{46,51,55},{113,118,122},{241,242,241},{252,252,249},{251,251,249},{252,252,249},{252,252,251}},{{227,228,228},{198,200,198},{80,81,84},{39,39,45},{30,31,37},{23,24,30},{20,20,26},{23,24,28},{23,24,28},{24,25,29},{19,20,24},{10,8,12},{9,6,9},{11,9,12},{5,3,6},{6,4,6},{6,4,6},{6,4,6},{8,6,8},{13,9,11},{10,7,10},{11,10,15},{23,24,30},{36,39,44},{49,54,59},{52,59,65},{69,76,82},{212,213,214},{254,254,253},{250,250,249},{250,250,249},{251,251,250}},{{225,227,226},{223,225,223},{180,181,184},{150,152,154},{119,121,121},{78,80,80},{48,49,50},{26,27,31},{17,17,22},{13,13,18},{12,13,17},{14,13,16},{8,6,9},{6,4,7},{4,2,5},{5,3,4},{5,3,3},{4,2,3},{8,6,7},{14,10,10},{11,7,9},{12,10,14},{18,18,23},{24,25,30},{45,49,54},{60,68,76},{50,58,67},{163,165,167},{250,250,249},{249,249,249},{250,250,250},{250,250,250}},{{222,223,224},{222,224,223},{225,226,229},{226,228,228},{218,220,219},{199,201,200},{169,172,171},{133,135,137},{96,98,101},{62,64,67},{39,41,44},{29,29,31},{18,16,19},{8,6,9},{4,3,5},{4,2,4},{6,4,6},{7,5,6},{10,8,9},{13,9,10},{14,10,12},{16,14,17},{15,15,19},{20,20,25},{28,31,37},{44,49,57},{47,54,63},{96,100,105},{224,225,227},{251,251,252},{250,249,249},{250,249,250}},{{216,217,219},{211,212,214},{211,212,214},{212,214,215},{211,214,215},{208,210,211},{199,201,202},{185,187,189},{162,165,167},{128,130,132},{87,89,91},{63,63,65},{53,53,55},{43,43,45},{33,32,34},{24,24,26},{21,20,22},{18,17,19},{14,14,15},{9,8,10},{11,10,12},{13,13,15},{12,12,15},{25,25,31},{23,23,31},{20,21,28},{34,36,44},{65,71,81},{177,181,189},{251,251,254},{251,247,249},{250,249,250}},{{202,203,205},{183,184,186},{165,166,168},{147,148,150},{135,136,138},{138,139,141},{141,142,144},{140,141,143},{1
33,134,136},{118,119,121},{93,94,96},{73,73,75},{70,70,72},{69,69,71},{67,67,69},{62,62,64},{57,57,59},{63,63,65},{50,51,53},{42,42,44},{37,37,39},{24,24,26},{15,15,17},{17,18,23},{17,18,24},{12,13,19},{19,20,26},{37,42,52},{107,111,119},{229,228,232},{251,248,250},{248,247,249}},{{194,195,197},{168,169,171},{134,135,137},{94,94,96},{70,70,72},{68,68,70},{70,70,72},{71,71,73},{72,72,74},{72,72,74},{67,67,69},{59,59,61},{58,58,60},{60,60,62},{61,61,63},{62,62,64},{78,78,80},{92,92,94},{99,99,101},{102,102,104},{115,115,117},{121,121,123},{116,116,118},{104,104,109},{90,91,95},{75,76,81},{54,55,60},{62,64,71},{112,112,119},{212,211,215},{251,249,251},{247,246,248}},{{187,188,190},{160,161,163},{124,125,127},{84,83,86},{66,65,67},{62,61,64},{59,58,61},{58,57,59},{56,55,58},{54,54,56},{53,52,55},{52,52,54},{54,54,56},{56,56,58},{57,57,59},{60,60,62},{71,71,73},{77,77,79},{96,96,98},{105,105,107},{123,123,125},{150,150,152},{175,175,177},{192,193,196},{202,203,206},{202,203,206},{189,190,193},{194,194,198},{217,216,220},{239,237,241},{248,246,249},{247,247,249}},{{190,191,193},{160,162,163},{120,121,123},{81,79,82},{68,65,69},{65,62,65},{61,59,62},{58,56,59},{55,52,56},{52,50,53},{51,49,52},{49,49,51},{50,50,52},{51,52,54},{54,54,56},{54,54,56},{52,52,54},{58,58,60},{75,75,77},{92,92,94},{112,112,114},{136,136,138},{160,160,162},{183,183,185},{203,204,206},{224,225,227},{243,243,245},{251,248,250},{249,246,248},{246,244,247},{246,244,248},{246,245,248}},{{206,207,210},{184,185,188},{149,149,152},{104,102,105},{73,71,73},{64,62,64},{61,60,62},{60,57,60},{57,53,57},{55,51,55},{53,49,53},{51,49,52},{50,50,52},{50,49,52},{50,50,52},{48,48,50},{48,47,50},{53,52,54},{64,64,66},{84,83,85},{105,105,107},{129,129,131},{152,152,154},{174,175,177},{194,195,197},{213,214,216},{228,228,230},{239,237,239},{244,241,244},{244,243,246},{245,244,248},{245,244,247}},{{218,217,222},{210,209,214},{194,194,198},{161,162,162},{115,117,116},{79,80,79},{60,62,61},{57,58,59},{57,56,58},{57,52,56}
,{55,50,54},{52,49,53},{50,48,51},{49,48,51},{49,48,51},{50,49,52},{51,49,52},{54,52,55},{61,59,62},{78,77,79},{98,98,100},{120,120,122},{143,143,145},{166,166,168},{187,187,189},{208,208,210},{224,224,226},{236,235,239},{241,240,245},{243,242,247},{244,242,247},{243,243,247}},{{219,218,223},{217,216,221},{216,216,220},{206,207,208},{182,184,184},{144,146,145},{100,102,102},{70,71,72},{57,56,58},{54,51,54},{54,49,53},{51,49,52},{49,48,51},{49,47,50},{50,48,51},{52,50,53},{54,52,55},{55,53,56},{59,57,60},{72,72,74},{92,92,94},{111,111,113},{133,133,135},{156,156,158},{179,179,181},{200,200,202},{217,217,219},{230,230,234},{238,237,242},{241,240,245},{241,240,245},{241,240,245}},{{217,216,221},{216,215,220},{217,216,221},{216,217,219},{212,213,214},{198,199,201},{170,171,172},{131,133,135},{94,95,97},{69,68,70},{58,55,58},{53,51,54},{52,50,53},{52,50,53},{51,49,52},{52,50,53},{54,52,55},{55,53,56},{59,57,60},{69,68,70},{86,86,88},{104,104,106},{125,125,127},{147,147,149},{172,172,174},{193,193,195},{210,210,212},{224,223,227},{233,232,237},{239,238,243},{239,238,243},{240,238,243}}}
+1|1|'dog'|'0/img2.jpg'|{{{126,118,110},{122,115,108},{126,119,111},{127,119,109},{130,122,111},{130,122,111},{132,124,113},{133,125,114},{130,122,111},{132,124,113},{134,126,115},{131,123,112},{131,123,112},{134,126,115},{133,125,114},{136,128,117},{137,129,118},{137,129,118},{136,128,117},{131,123,112},{130,122,111},{132,124,113},{132,124,113},{132,124,113},{129,122,110},{127,121,109},{127,121,109},{125,119,107},{124,118,106},{124,118,106},{120,114,102},{117,111,99}},{{122,115,107},{119,112,104},{121,114,106},{124,116,107},{129,121,110},{130,122,111},{130,122,111},{130,122,111},{128,120,109},{131,123,112},{134,126,115},{132,124,113},{132,124,113},{134,126,115},{136,128,117},{139,131,120},{141,133,122},{138,130,119},{135,127,116},{130,122,111},{131,123,112},{132,124,113},{133,125,114},{133,125,114},{131,124,113},{129,123,111},{130,124,112},{129,123,111},{127,121,109},{124,118,106},{122,116,104},{117,111,99}},{{122,115,107},{121,114,106},{121,114,106},{125,118,108},{128,120,109},{129,121,110},{131,123,112},{130,122,111},{128,120,109},{131,123,112},{133,125,114},{134,126,115},{136,128,117},{137,129,118},{138,130,119},{139,131,120},{139,131,120},{139,131,120},{139,131,120},{139,131,120},{139,131,120},{137,129,118},{137,129,118},{138,130,119},{136,129,118},{135,129,117},{133,127,115},{130,124,112},{129,123,111},{126,120,108},{123,117,105},{121,115,103}},{{123,116,108},{123,116,108},{125,118,110},{127,119,109},{127,119,108},{129,121,110},{132,125,113},{131,123,112},{130,122,111},{134,127,115},{134,126,115},{135,127,116},{133,125,114},{134,126,116},{136,128,118},{140,132,121},{142,134,123},{141,133,122},{141,132,122},{142,133,123},{141,132,122},{141,133,122},{140,131,120},{137,128,117},{136,128,117},{136,130,118},{132,127,115},{128,122,110},{127,121,109},{127,121,109},{125,119,107},{123,117,105}},{{124,119,110},{125,119,111},{128,122,113},{129,123,113},{132,123,113},{136,123,115},{137,125,116},{134,125,116},{135,125,116},{136,125,116},{134,125,115},{133,126,116},{136,124
,115},{135,127,113},{137,131,115},{140,130,118},{141,130,120},{139,131,120},{137,132,118},{135,133,117},{134,130,117},{136,130,118},{136,130,119},{134,129,117},{132,127,115},{132,126,114},{131,125,113},{131,124,112},{129,122,110},{127,121,109},{128,122,110},{128,122,110}},{{124,121,112},{124,121,112},{128,124,115},{129,125,116},{136,124,118},{145,124,121},{143,126,121},{136,128,120},{136,126,119},{140,123,118},{136,126,118},{133,129,121},{146,130,130},{136,130,115},{135,133,112},{138,131,117},{138,130,119},{139,132,120},{137,132,117},{133,132,113},{132,131,115},{135,130,118},{136,129,118},{135,130,118},{131,127,115},{132,124,113},{132,124,113},{131,123,112},{129,122,110},{128,122,110},{127,121,109},{127,121,109}},{{123,120,111},{122,119,110},{124,121,112},{126,122,113},{132,122,115},{138,125,119},{135,128,120},{128,130,119},{130,129,119},{134,124,117},{127,118,110},{129,126,120},{142,135,139},{154,139,136},{141,130,123},{130,133,125},{131,135,121},{138,135,122},{143,130,122},{145,124,120},{142,126,119},{140,128,118},{141,128,119},{142,126,118},{139,125,116},{134,126,115},{134,126,115},{133,125,114},{131,123,112},{129,122,110},{126,120,108},{124,118,106}},{{121,118,109},{121,118,109},{122,119,110},{123,118,109},{127,121,113},{130,126,117},{131,131,121},{134,136,125},{130,131,121},{133,130,121},{133,127,119},{136,131,123},{115,104,98},{128,91,93},{151,118,117},{139,136,125},{135,137,122},{140,136,124},{139,132,124},{141,131,126},{140,128,121},{136,129,117},{134,131,118},{135,128,117},{137,126,116},{135,127,116},{135,127,116},{133,125,114},{131,123,112},{128,122,110},{127,121,109},{125,119,107}},{{124,121,112},{124,121,112},{126,123,114},{127,122,113},{128,124,115},{132,130,121},{138,131,123},{162,149,143},{156,144,138},{137,133,124},{141,136,128},{143,136,125},{137,120,105},{119,80,78},{144,108,108},{147,136,125},{142,133,123},{146,133,124},{149,134,123},{148,132,120},{151,130,121},{140,129,119},{127,134,118},{126,133,117},{131,128,116},{135,127,116},{134,126,115},{13
2,124,113},{130,122,111},{128,122,110},{128,122,110},{126,120,108}},{{126,123,114},{126,123,114},{128,125,116},{130,127,118},{129,129,119},{135,131,122},{143,122,119},{142,106,108},{163,131,132},{147,134,128},{140,136,127},{139,132,124},{141,134,128},{120,115,122},{134,131,142},{151,151,155},{143,139,133},{154,128,121},{152,90,85},{155,61,56},{158,77,80},{155,115,115},{141,131,121},{133,131,118},{135,128,117},{135,127,116},{134,126,115},{132,124,113},{130,123,112},{129,123,111},{126,120,108},{126,120,108}},{{126,121,112},{126,121,113},{130,125,116},{131,126,116},{131,128,117},{134,127,119},{141,123,120},{110,64,71},{121,57,65},{151,117,110},{143,135,122},{132,134,122},{136,138,130},{102,114,121},{80,97,121},{90,107,138},{130,146,164},{149,133,146},{142,71,84},{165,54,64},{151,34,43},{150,63,65},{159,116,110},{142,131,119},{134,130,118},{135,127,116},{136,128,117},{134,126,115},{131,123,112},{130,124,112},{126,120,108},{124,118,106}},{{129,122,114},{128,121,114},{131,124,116},{133,127,115},{135,129,117},{136,129,120},{138,132,127},{150,123,127},{144,96,100},{152,120,109},{144,129,114},{131,128,119},{171,174,177},{160,168,180},{132,146,167},{97,117,145},{106,131,159},{136,135,162},{150,112,134},{186,129,141},{202,129,128},{149,78,70},{159,118,107},{146,134,121},{135,132,119},{136,128,117},{136,128,117},{136,128,117},{134,126,115},{134,128,116},{131,125,113},{126,120,108}},{{133,126,118},{132,125,117},{134,127,119},{136,130,118},{136,130,118},{138,132,123},{139,131,126},{113,113,117},{126,135,146},{168,171,175},{149,142,143},{119,111,122},{148,157,188},{194,207,232},{166,178,191},{159,172,180},{180,194,207},{191,192,207},{170,156,167},{189,168,173},{211,196,191},{149,139,127},{147,135,123},{149,136,123},{145,134,122},{140,132,121},{138,130,119},{136,128,117},{132,125,113},{132,126,114},{131,125,113},{128,122,110}},{{134,127,119},{135,128,120},{136,128,121},{136,130,118},{137,131,119},{137,131,122},{142,134,128},{90,93,108},{33,52,92},{89,107,144},{145,161,193},{152,169
,196},{116,138,164},{133,151,175},{167,177,200},{189,194,216},{207,211,225},{210,216,225},{204,212,217},{203,212,214},{173,170,171},{143,132,131},{146,137,129},{145,137,122},{144,136,120},{141,133,122},{139,131,120},{138,130,119},{133,125,114},{130,124,112},{128,122,110},{126,120,108}},{{131,124,116},{134,128,120},{135,128,120},{136,130,118},{139,133,121},{138,131,122},{109,101,95},{121,117,130},{103,109,150},{52,67,114},{60,87,130},{108,140,172},{150,174,198},{146,165,191},{128,141,172},{140,148,181},{158,164,187},{169,181,196},{183,201,208},{183,195,199},{154,140,144},{156,133,132},{150,137,128},{142,138,121},{141,138,121},{141,133,122},{137,129,118},{136,128,117},{135,127,116},{133,127,115},{129,123,111},{128,122,110}},{{132,124,116},{130,121,114},{135,126,120},{140,131,122},{140,134,122},{138,135,124},{125,119,115},{157,159,162},{162,174,189},{140,149,179},{140,154,186},{131,146,177},{99,114,155},{137,152,188},{164,182,209},{164,182,203},{156,170,195},{146,162,184},{147,161,179},{168,173,191},{159,159,158},{147,144,128},{146,139,124},{146,139,123},{143,135,123},{142,134,124},{136,128,117},{133,125,114},{134,127,116},{127,121,109},{129,123,111},{129,123,111}},{{141,129,119},{136,122,116},{135,124,120},{141,129,128},{135,130,122},{148,152,145},{185,191,204},{141,169,193},{119,147,170},{181,181,202},{192,200,212},{204,203,220},{167,172,199},{132,143,172},{119,136,168},{137,160,193},{169,188,213},{185,192,204},{175,177,202},{92,107,179},{97,120,166},{134,143,141},{149,140,126},{147,137,121},{140,132,126},{140,131,128},{138,131,116},{137,129,115},{137,128,119},{133,125,115},{131,124,112},{129,122,111}},{{139,129,117},{132,130,117},{127,134,120},{131,137,124},{157,154,155},{180,178,199},{118,136,175},{89,115,149},{153,146,163},{192,179,185},{182,207,197},{193,217,214},{202,206,219},{193,200,218},{171,186,209},{129,149,182},{96,125,171},{137,161,179},{169,183,203},{53,62,153},{34,44,157},{102,112,172},{124,127,141},{145,138,133},{143,133,125},{140,133,117},{139,132,117
},{137,129,117},{136,128,116},{137,129,118},{135,127,116},{130,122,111}},{{134,128,114},{137,129,112},{143,130,112},{145,90,76},{165,123,127},{107,118,147},{94,115,158},{162,137,167},{152,76,85},{165,113,119},{207,202,202},{200,212,217},{213,213,226},{223,226,236},{220,229,236},{210,224,231},{150,175,200},{71,107,153},{104,138,195},{107,119,184},{50,51,159},{70,83,198},{60,77,162},{114,111,149},{140,126,118},{136,130,103},{133,125,112},{136,127,117},{134,126,110},{135,127,115},{134,126,115},{129,121,110}},{{137,127,114},{154,108,97},{159,66,64},{168,25,19},{151,29,30},{117,61,76},{168,148,162},{201,168,175},{180,136,140},{192,168,177},{222,207,225},{228,224,240},{219,216,222},{187,186,189},{189,190,189},{214,215,210},{233,229,229},{181,198,221},{77,114,170},{93,124,184},{133,149,200},{68,82,181},{33,46,185},{86,86,176},{134,125,146},{137,128,123},{136,128,117},{135,128,113},{134,126,112},{132,124,113},{131,123,112},{129,121,110}},{{139,122,111},{133,65,55},{151,14,14},{167,15,13},{174,17,17},{171,20,23},{155,53,47},{191,142,135},{216,219,216},{213,237,237},{214,218,224},{166,172,170},{95,92,90},{46,40,39},{51,45,44},{82,74,75},{125,112,115},{164,166,160},{147,167,173},{55,80,130},{118,138,172},{139,150,196},{64,67,163},{104,112,203},{113,111,187},{121,103,148},{140,131,120},{136,129,106},{133,124,120},{130,122,113},{129,121,110},{127,119,108}},{{140,121,112},{133,85,73},{137,45,37},{147,29,31},{162,20,19},{180,18,10},{173,19,14},{161,56,61},{181,157,159},{148,152,149},{90,79,74},{44,39,25},{55,47,39},{69,59,54},{57,45,43},{43,31,31},{33,28,24},{34,29,26},{54,48,52},{58,57,67},{73,79,100},{142,144,158},{137,137,155},{113,126,182},{84,89,183},{102,86,158},{130,123,125},{135,128,110},{135,123,122},{131,121,111},{128,120,107},{126,118,106}},{{135,124,117},{106,91,83},{65,46,39},{114,58,55},{154,47,45},{162,31,28},{145,27,26},{99,25,26},{61,33,30},{39,31,22},{52,47,36},{97,89,77},{135,124,111},{143,132,119},{137,125,113},{126,114,102},{110,99,92},{93,81,78},{74,60,59},{6
3,51,47},{80,71,69},{114,110,107},{132,133,133},{129,139,151},{141,151,187},{166,169,211},{173,172,189},{140,133,131},{134,123,112},{133,123,107},{129,120,104},{127,119,104}},{{129,120,112},{68,60,52},{19,12,5},{33,27,22},{73,42,41},{81,26,28},{50,12,14},{32,15,11},{51,37,28},{92,85,73},{133,125,114},{151,137,127},{147,135,121},{144,132,118},{147,135,121},{149,137,123},{150,138,125},{148,136,124},{145,133,120},{137,125,113},{115,105,92},{85,79,69},{101,101,95},{128,133,130},{159,169,173},{199,211,219},{225,231,232},{184,185,176},{133,129,114},{129,122,107},{127,119,105},{125,117,104}},{{128,120,109},{109,101,91},{76,67,58},{25,33,23},{14,17,12},{30,18,16},{45,42,38},{78,81,71},{120,115,100},{140,131,118},{144,128,117},{145,126,116},{144,132,118},{144,132,118},{145,133,119},{145,133,119},{142,131,114},{145,134,115},{147,136,118},{148,137,119},{146,137,118},{128,122,105},{97,93,79},{90,89,78},{116,120,113},{155,163,161},{200,206,205},{220,225,226},{178,181,181},{127,125,116},{122,118,107},{124,117,104}},{{125,117,104},{124,116,104},{129,122,109},{99,95,79},{72,63,51},{93,76,70},{126,111,100},{136,130,113},{136,133,114},{142,128,113},{135,116,103},{129,112,99},{130,118,104},{137,125,111},{142,130,116},{144,132,118},{142,131,113},{144,133,114},{145,134,115},{145,134,115},{144,134,117},{144,135,119},{142,134,118},{112,104,89},{79,72,64},{96,92,94},{147,146,154},{194,197,211},{222,228,241},{170,174,170},{119,119,110},{123,117,104}},{{124,117,101},{120,113,97},{125,118,103},{131,121,104},{130,120,109},{136,124,119},{143,126,117},{139,127,110},{139,132,113},{146,129,114},{142,125,109},{132,123,105},{134,123,109},{141,129,115},{142,130,116},{143,131,117},{141,129,114},{144,132,117},{145,133,118},{144,132,117},{140,128,116},{140,128,117},{144,131,119},{145,131,118},{131,115,104},{99,84,77},{86,77,74},{112,112,114},{166,172,176},{203,211,209},{153,156,149},{120,115,103}},{{124,117,103},{122,115,101},{130,122,109},{130,122,109},{127,122,112},{130,125,118},{134,125,116},{138,128
,114},{140,130,113},{142,127,112},{140,126,111},{138,129,113},{142,130,116},{139,127,113},{134,122,108},{142,130,116},{141,129,116},{142,130,117},{144,132,119},{143,131,118},{133,122,111},{136,126,116},{140,128,118},{139,127,116},{141,127,113},{140,126,110},{119,110,96},{92,88,75},{81,81,71},{120,120,114},{156,154,145},{123,117,105}},{{126,117,106},{128,120,109},{130,122,111},{129,121,110},{131,122,111},{132,124,113},{133,125,114},{136,126,114},{139,127,113},{138,126,112},{138,126,112},{140,128,114},{139,127,113},{137,125,111},{135,123,109},{142,130,116},{140,128,114},{140,128,114},{140,128,114},{140,128,114},{136,126,114},{136,128,117},{137,129,118},{137,129,118},{134,126,114},{134,127,111},{137,130,115},{136,128,113},{119,112,97},{97,89,78},{113,105,93},{121,113,102}},{{126,118,107},{126,118,107},{128,120,109},{130,122,111},{130,122,111},{131,123,112},{134,126,115},{136,126,114},{137,125,111},{137,125,111},{136,124,110},{139,127,113},{139,127,113},{139,127,113},{137,125,111},{138,126,112},{138,126,112},{138,126,112},{139,127,113},{139,127,113},{135,125,113},{138,130,119},{142,134,123},{141,133,122},{140,132,120},{138,130,117},{138,130,117},{135,127,114},{138,130,118},{133,125,114},{122,114,103},{114,106,95}},{{125,117,106},{126,118,107},{127,119,108},{129,121,110},{129,121,110},{129,121,110},{133,125,114},{133,123,111},{134,122,108},{135,123,109},{136,124,110},{138,126,112},{138,126,112},{136,124,110},{135,123,109},{136,124,110},{136,124,110},{137,125,111},{139,127,113},{138,126,112},{136,127,115},{138,130,119},{142,134,123},{140,132,121},{138,130,119},{134,126,115},{135,127,116},{135,127,116},{132,124,113},{128,120,109},{121,113,102},{114,106,95}},{{123,115,104},{123,115,104},{126,118,107},{128,120,109},{129,121,110},{129,121,110},{131,123,112},{132,122,110},{133,121,107},{135,123,109},{135,123,109},{135,123,109},{136,124,110},{133,121,107},{134,122,108},{136,124,110},{136,124,110},{137,125,111},{134,122,108},{130,118,104},{133,123,111},{133,125,115},{136,128,117
},{136,128,117},{135,127,117},{134,125,116},{132,123,114},{131,122,113},{129,121,111},{128,119,109},{126,118,107},{121,113,102}}}
 \.
 
 DROP TABLE IF EXISTS cifar_10_sample_batched;
diff --git a/src/ports/postgres/modules/deep_learning/test/unit_tests/test_input_data_preprocessor.py_in b/src/ports/postgres/modules/deep_learning/test/unit_tests/test_input_data_preprocessor.py_in
index f21176c..d2e14cd 100644
--- a/src/ports/postgres/modules/deep_learning/test/unit_tests/test_input_data_preprocessor.py_in
+++ b/src/ports/postgres/modules/deep_learning/test/unit_tests/test_input_data_preprocessor.py_in
@@ -61,6 +61,8 @@
         self.module = deep_learning.input_data_preprocessor
         import utilities.minibatch_preprocessing
         self.util_module = utilities.minibatch_preprocessing
+        import utilities.control
+        self.control_module = utilities.control
         self.module.get_expr_type = Mock(side_effect = ['integer[]', 'integer[]'])
         self.module.validate_module_input_params = Mock()
         self.module.get_distinct_col_levels = Mock(return_value = [0,22,100])
@@ -70,6 +72,9 @@
 
     def test_input_preprocessor_dl_executes_query(self):
         self.module.get_expr_type = Mock(side_effect = ['integer[]', 'integer[]'])
+        self.control_module.OptimizerControl.__enter__ = Mock()
+        self.control_module.OptimizerControl.optimizer_control = True
+        self.control_module.OptimizerControl.optimizer_enabled = True
         preprocessor_obj = self.module.InputDataPreprocessorDL(
             self.default_schema_madlib,
             "input",
@@ -85,6 +90,9 @@
 
     def test_input_preprocessor_null_buffer_size_executes_query(self):
         self.module.get_expr_type = Mock(side_effect = ['integer[]', 'integer[]'])
+        self.control_module.OptimizerControl.__enter__ = Mock()
+        self.control_module.OptimizerControl.optimizer_control = True
+        self.control_module.OptimizerControl.optimizer_enabled = True
         preprocessor_obj = self.module.InputDataPreprocessorDL(
             self.default_schema_madlib,
             "input",
diff --git a/src/ports/postgres/modules/utilities/minibatch_preprocessing.py_in b/src/ports/postgres/modules/utilities/minibatch_preprocessing.py_in
index e03bf44..c25463c 100644
--- a/src/ports/postgres/modules/utilities/minibatch_preprocessing.py_in
+++ b/src/ports/postgres/modules/utilities/minibatch_preprocessing.py_in
@@ -457,10 +457,13 @@
     @staticmethod
     def calculate_default_buffer_size(buffer_size,
                                       avg_num_rows_processed,
-                                      independent_var_dimension):
+                                      independent_var_dimension,
+                                      num_of_segments=None):
         if buffer_size is not None:
             return buffer_size
-        num_of_segments = get_seg_number()
+
+        if num_of_segments is None:
+            num_of_segments = get_seg_number()
 
         default_buffer_size = min(75000000.0/independent_var_dimension,
                                     float(avg_num_rows_processed)/num_of_segments)
diff --git a/src/ports/postgres/modules/utilities/utilities.py_in b/src/ports/postgres/modules/utilities/utilities.py_in
index 687566a..12b5205 100644
--- a/src/ports/postgres/modules/utilities/utilities.py_in
+++ b/src/ports/postgres/modules/utilities/utilities.py_in
@@ -20,6 +20,27 @@
 
 m4_changequote(`<!', `!>')
 
+def plpy_execute_debug(sql, *args, **kwargs):
+    """ Replace plpy.execute(sql, ...) with
+        plpy_execute_debug(sql, ...) to debug
+        a query.  Shows the query itself, the
+        EXPLAIN of it, and how long the query
+        takes to execute.
+    """
+    plpy.info(sql)  # Print sql command
+
+    # Print EXPLAIN of sql command
+    res = plpy.execute("EXPLAIN " + sql, *args)
+    for r in res:
+        plpy.info(r['QUERY PLAN'])
+
+    # Run actual sql command, with timing
+    start = time.time()
+    plpy.execute(sql, *args)
+
+    # Print how long execution of query took
+    plpy.info("Query took {0}s".format(time.time() - start))
+
 def has_function_properties():
     """ __HAS_FUNCTION_PROPERTIES__ variable defined during configure """
     return m4_ifdef(<!__HAS_FUNCTION_PROPERTIES__!>, <!True!>, <!False!>)