Adding Operators (#738)

* Adding Floor and Abs Operators in autograd and sonnx (usage sketch below)

* Update test_operation.py
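
A minimal usage sketch for the new operator (not part of the diff), assuming a
SINGA build that includes this change; it mirrors the test added below, and the
device helper used here is only one way to obtain a device.

    import numpy as np
    from singa import autograd, device, tensor

    dev = device.get_default_device()

    # forward: y = floor(x) applied elementwise -> [-2., 1.]
    x = tensor.from_numpy(np.array([-1.9, 1.2], dtype=np.float32))
    x.to_device(dev)
    y = autograd.floor(x)
    print(tensor.to_numpy(y))

    # backward: floor is piecewise constant, so the gradient is all zeros
    dy = tensor.from_numpy(np.ones((2), dtype=np.float32))
    dy.to_device(dev)
    dx = y.creator.backward(dy.data)
    print(tensor.to_numpy(tensor.from_raw_tensor(dx)))  # [0., 0.]
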
diff --git a/python/singa/autograd.py b/python/singa/autograd.py
index 0fe7b2c..ddfbf03 100644
--- a/python/singa/autograd.py
+++ b/python/singa/autograd.py
@@ -4299,6 +4299,50 @@
     return Ceil()(x)[0]
 
 
+class Floor(Operator):
+    """
+    Floor takes one input data (Tensor) and produces one output data (Tensor),
+    where the floor function, `y = floor(x)`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Floor, self).__init__()
+
+    def forward(self, x):
+        """
+        Forward of floor.
+        Args:
+            x (CTensor): input tensor
+        Returns:
+            the output CTensor
+        """
+        return singa.Floor(x)
+
+    def backward(self, dy):
+        """
+        Backward of floor. The derivative of floor is 0.
+        Args:
+            dy (CTensor): gradient tensor
+        Returns:
+            the gradient tensor over the input tensor, which is all zeros.
+        """
+        dy = singa.Tensor(dy.shape(), dy.device())
+        dy.SetFloatValue(0.)
+        return dy
+
+
+def floor(x):
+    """
+    floor takes one input data (Tensor) and produces one output data (Tensor),
+    where the floor function, `y = floor(x)`, is applied to the tensor elementwise.
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output tensor
+    """
+    return Floor()(x)[0]
+
+
 class Split(Operator):
     """
     Init a Split, Split a tensor into a list of tensors, along the specified
diff --git a/python/singa/sonnx.py b/python/singa/sonnx.py
index db04e73..6ff7cef 100755
--- a/python/singa/sonnx.py
+++ b/python/singa/sonnx.py
@@ -1089,6 +1089,8 @@
         'Unsqueeze': 'Unsqueeze',
         'NonZero': 'NonZero',
         'Ceil': 'Ceil',
+        'Floor': 'Floor',
+        'Abs': 'Abs',
         # special op
         'ScatterElements': 'ScatterElements',
         'Cast': 'Cast',
diff --git a/test/python/test_operation.py b/test/python/test_operation.py
index 35de47c..54d2513 100755
--- a/test/python/test_operation.py
+++ b/test/python/test_operation.py
@@ -2881,6 +2881,28 @@
     def test_ceil_gpu(self):
         self.ceil_test(gpu_dev)
 
+    def floor_test(self, dev):
+        X = np.array([-1.9, 1.2]).astype(np.float32)
+        DY = np.ones((2), dtype=np.float32)
+        y = np.floor(X)
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.floor(x)
+        dx = result.creator.backward(dy.data)
+        DX = np.zeros((2), dtype=np.float32)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result), y, decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tensor.from_raw_tensor(dx)), DX, decimal=5)
+
+    def test_floor_cpu(self):
+        self.floor_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_floor_gpu(self):
+        self.floor_test(gpu_dev)
+
     def _test_scatter_elements(self, dev):
         # testing witout axis
         data = np.zeros((3, 3), dtype=np.float32)
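
The sonnx change above only extends the operator-name table, so importing an
ONNX graph that contains Floor or Abs nodes now has something to map to. The
following is an illustration of that lookup, not the actual sonnx dispatch
code: the resolve helper and the trimmed-down table are assumptions for the
example, while the new dictionary entries themselves come from the diff.

    from singa import autograd

    # trimmed-down copy of the backend's op-name table, including the new entries
    _rename_operators = {
        'Ceil': 'Ceil',
        'Floor': 'Floor',
        'Abs': 'Abs',
    }

    def resolve(onnx_op_type):
        # translate an ONNX op type into the matching singa autograd operator,
        # e.g. 'Floor' -> autograd.Floor
        return getattr(autograd, _rename_operators[onnx_op_type])

    op_cls = resolve('Floor')
    print(op_cls)  # <class 'singa.autograd.Floor'>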