git-server-git.apps.pok.os.sepia.ceph.com Git - s3-tests.git/commitdiff
s3select: align s3select tests with ceph
author Albin Antony <aantony@redhat.com>
Tue, 18 May 2021 02:09:22 +0000 (07:39 +0530)
committer Albin Antony <aantony@redhat.com>
Wed, 30 Jun 2021 09:43:45 +0000 (15:13 +0530)
Update s3-tests to handle the error response (HTTP 400 return code and error description)

Signed-off-by: Albin Antony <aantony@redhat.com>
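
The change below relies on botocore raising ClientError when the s3select endpoint rejects a query with HTTP 400; the stringified exception carries the status and the server's error description, which the updated tests search for. A minimal sketch of that behaviour, assuming a hypothetical bucket and key (not taken from the test suite):

    import boto3
    from botocore.exceptions import ClientError

    s3 = boto3.client('s3')
    try:
        s3.select_object_content(
            Bucket='example-bucket',   # hypothetical bucket name
            Key='example.csv',         # hypothetical object key
            ExpressionType='SQL',
            # deliberately malformed query, mirroring the syntax-error tests in the diff
            Expression='select count(*) from s3object where _1 like "%aeio%" like;',
            InputSerialization={'CSV': {}},
            OutputSerialization={'CSV': {}},
        )
    except ClientError as c:
        # str(c) includes the HTTP status and the error description returned by the
        # server, e.g. a marker such as "s3select-Syntax-Error" on Ceph/RGW, which is
        # what run_s3select now returns to its callers instead of raising.
        print(str(c))
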
s3tests_boto3/functional/test_s3select.py

index 123d4c2f39e22dab32d7711866b90b3c3363fd5f..de627af30c91e876c6241fd81a4ca973fd4e0da9 100644
@@ -3,6 +3,7 @@ import random
 import string
 import re
 from nose.plugins.attrib import attr
+from botocore.exceptions import ClientError
 
 import uuid
 from nose.tools import eq_ as eq
@@ -221,16 +222,20 @@ def upload_csv_object(bucket_name,new_key,obj):
 def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',esc_char='\\',csv_header_info="NONE"):
 
     s3 = get_client()
-
-    r = s3.select_object_content(
+    result = ""
+    try:
+        r = s3.select_object_content(
         Bucket=bucket,
         Key=key,
         ExpressionType='SQL',
         InputSerialization = {"CSV": {"RecordDelimiter" : row_delim, "FieldDelimiter" : column_delim,"QuoteEscapeCharacter": esc_char, "QuoteCharacter": quot_char, "FileHeaderInfo": csv_header_info}, "CompressionType": "NONE"},
         OutputSerialization = {"CSV": {}},
         Expression=query,)
+
+    except ClientError as c:
+        result += str(c)
+        return result
     
-    result = ""
     for event in r['Payload']:
         if 'Records' in event:
             records = event['Records']['Payload'].decode('utf-8')
@@ -266,7 +271,7 @@ def create_list_of_int(column_pos,obj,field_split=",",row_split="\n"):
             col_num+=1
 
     return list_of_int
-       
+
 @attr('s3select')
 def test_count_operation():
     csv_obj_name = get_random_string()
@@ -334,11 +339,13 @@ def test_column_sum_min_max():
 
     # the following queries, validates on *random* input an *accurate* relation between condition result,sum operation and count operation.
     res_s3select = remove_xml_tags_from_result(  run_s3select(bucket_name_2,csv_obj_name_2,"select count(0),sum(int(_1)),sum(int(_2)) from s3object where (int(_1)-int(_2)) = 2;" ) )
+
     count,sum1,sum2,d = res_s3select.split(",")
 
     s3select_assert_result( int(count)*2 , int(sum1)-int(sum2 ) )
 
     res_s3select = remove_xml_tags_from_result(  run_s3select(bucket_name,csv_obj_name,"select count(0),sum(int(_1)),sum(int(_2)) from s3object where (int(_1)-int(_2)) = 4;" ) ) 
+
     count,sum1,sum2,d = res_s3select.split(",")
 
     s3select_assert_result( int(count)*4 , int(sum1)-int(sum2) )
@@ -466,7 +473,7 @@ def test_lowerupper_expressions():
 @attr('s3select')
 def test_in_expressions():
 
-    # purpose of test: engine is process correctly several projections containing aggregation-functions 
+    # purpose of test: the engine processes correctly several projections containing aggregation-functions
     csv_obj = create_random_csv_object(10000,10)
 
     csv_obj_name = get_random_string()
@@ -605,6 +612,12 @@ def test_like_expressions():
 
     s3select_assert_result( res_s3select_like, res_s3select )
 
+    res_s3select_like = remove_xml_tags_from_result(  run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "%aeio%" like;')).replace("\n","")
+
+    find_like = res_s3select_like.find("s3select-Syntax-Error")
+
+    assert int(find_like) >= 0
+
     res_s3select_like = remove_xml_tags_from_result(  run_s3select(bucket_name,csv_obj_name,'select (_1 like "cbcd%") from s3object;')).replace("\n","")
 
     res_s3select = remove_xml_tags_from_result(  run_s3select(bucket_name,csv_obj_name, 'select (substring(_1,1,4) = "cbcd") from s3object;')).replace("\n","")
@@ -939,6 +952,10 @@ def test_schema_definition():
 
     assert res_multiple_defintion.find("alias {c11} or column not exist in schema") > 0
 
+    find_processing_error = res_multiple_defintion.find("s3select-ProcessingTime-Error")
+    
+    assert int(find_processing_error) >= 0
+
     # alias-name is identical to column-name
     res_multiple_defintion = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(c1)+int(c2) as c4,c4 from s3object;",csv_header_info="USE") ).replace("\n","")