#-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
/*
 * Script to run the `nn` library gradient checks and unit tests.
 */
source("nn/test/grad_check.dml") as grad_check
source("nn/test/test.dml") as test
print("")
print("Starting grad checks.")
print("---")
# Loss & loss-related functions
grad_check::cross_entropy_loss()
grad_check::cross_entropy_loss2d()
grad_check::l1_loss()
grad_check::l1_reg()
grad_check::l2_loss()
grad_check::l2_reg()
grad_check::log_loss()
print("")
# Core layers
grad_check::affine()
grad_check::low_rank_affine()
grad_check::batch_norm1d()
grad_check::batch_norm2d()
grad_check::conv2d()
grad_check::conv2d_builtin()
grad_check::conv2d_simple()
grad_check::conv2d_depthwise()
grad_check::conv2d_transpose()
grad_check::conv2d_transpose_depthwise()
grad_check::dropout()
grad_check::elu()
grad_check::fm()
grad_check::lstm()
grad_check::max_pool2d()
grad_check::max_pool2d_builtin()
grad_check::max_pool2d_simple()
grad_check::avg_pool2d_builtin()
grad_check::upsample2d()
grad_check::relu()
grad_check::leaky_relu()
grad_check::rnn()
grad_check::scale_shift1d()
grad_check::scale_shift2d()
grad_check::sigmoid()
grad_check::softmax()
grad_check::softmax2d()
grad_check::tanh()
print("")
# Example model
# NOTE: This test can produce a false negative in which the grad check
# fails due to a kink being crossed in the ReLU nonlinearity. This occurs
# when the two finite-difference evaluations, f(x-h) and f(x+h), end up on
# opposite sides of the zero threshold of max(0, x) (see the sketch below).
# Although we have stabilized it, we still disable it here to avoid false
# failures during automated testing. In the future, we could explicitly
# detect this scenario and rerun the test automatically. For manual
# testing, simply rerun the test.
#grad_check::two_layer_affine_l2_net()
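#
# Illustrative sketch (not part of the test suite): a grad check compares the
# analytic gradient against the centered finite difference
#   (f(x+h) - f(x-h)) / (2*h).
# When x lies within h of the ReLU kink at 0, the two evaluations straddle the
# kink and the numeric estimate no longer matches the true one-sided derivative.
# For example, with h = 1e-5 and x = 2e-6:
#   f(x+h) = max(0, 1.2e-5) = 1.2e-5,  f(x-h) = max(0, -8e-6) = 0
#   numeric grad = (1.2e-5 - 0) / (2e-5) = 0.6, while the analytic grad is 1,
# so the relative error is large even though the backward pass is correct.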
print("")
print("---")
print("Grad checks complete -- look for any ERRORs or WARNINGs.")
print("If any tests involving ReLUs failed, try a few times " +
"to ensure that they were not false negatives due to " +
"kinks being crossed.")
print("")
print("")
print("Starting other tests.")
print("---")
test::batch_norm1d()
test::batch_norm2d()
test::conv2d()
test::conv2d_depthwise()
test::conv2d_transpose()
test::conv2d_transpose_depthwise()
test::cross_entropy_loss()
test::cross_entropy_loss2d()
test::elu()
test::im2col()
test::max_pool2d()
test::padding()
test::tanh()
test::threshold()
test::transpose_NCHW_to_CNHW()
test::top_k_row()
test::top_k()
test::top_k2d()
test::softmax2d()
test::compare_tanh_builtin_forward_with_old()
test::compare_tanh_builtin_backward_with_old()
print("---")
print("Other tests complete -- look for any ERRORs or WARNINGs.")
print("")
print("")