git.apps.os.sepia.ceph.com Git - s3-tests.git/commitdiff
add handling for EventStreamError exception 561/head
author: Gal Salomon <gal.salomon@gmail.com>
Wed, 17 Apr 2024 15:20:37 +0000 (18:20 +0300)
committer: Gal Salomon <gal.salomon@gmail.com>
Wed, 17 Apr 2024 15:20:37 +0000 (18:20 +0300)
Signed-off-by: Gal Salomon <gal.salomon@gmail.com>
s3tests_boto3/functional/test_s3select.py

index d7aa409a21033091567f1160b1dfe484b7a59787..86466f3df2769f8006b8f68d3a3aede2ce8992bf 100644 (file)
@@ -4,6 +4,7 @@ import string
 import re
 import json
 from botocore.exceptions import ClientError
+from botocore.exceptions import EventStreamError
 
 import uuid
 
@@ -277,6 +278,7 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
     s3 = get_client()
     result = ""
     result_status = {}
+
     try:
         r = s3.select_object_content(
         Bucket=bucket,
@@ -292,26 +294,34 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
         return result
 
     if progress == False:
-        for event in r['Payload']:
-            if 'Records' in event:
-                records = event['Records']['Payload'].decode('utf-8')
-                result += records
+
+        try:
+            for event in r['Payload']:
+                if 'Records' in event:
+                    records = event['Records']['Payload'].decode('utf-8')
+                    result += records
+
+        except EventStreamError as c:
+            result = str(c)
+            return result
+        
     else:
-        result = []
-        max_progress_scanned = 0
-        for event in r['Payload']:
-            if 'Records' in event:
-                records = event['Records']
-                result.append(records.copy())
-            if 'Progress' in event:
-                if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned):
-                    max_progress_scanned = event['Progress']['Details']['BytesScanned']
-                    result_status['Progress'] = event['Progress']
-
-            if 'Stats' in event:
-                result_status['Stats'] = event['Stats']
-            if 'End' in event:
-                result_status['End'] = event['End']
+            result = []
+            max_progress_scanned = 0
+            for event in r['Payload']:
+                if 'Records' in event:
+                    records = event['Records']
+                    result.append(records.copy())
+                if 'Progress' in event:
+                    if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned):
+                        max_progress_scanned = event['Progress']['Details']['BytesScanned']
+                        result_status['Progress'] = event['Progress']
+
+                if 'Stats' in event:
+                    result_status['Stats'] = event['Stats']
+                if 'End' in event:
+                    result_status['End'] = event['End']
+
 
     if progress == False:
         return result
@@ -1160,8 +1170,6 @@ def test_alias():
 @pytest.mark.s3select
 def test_alias_cyclic_refernce():
 
-    ## TEMP : RGW may return error-status that it is not handled by this test
-    return
     number_of_rows = 10000
     
     # purpose of test is to validate the s3select-engine is able to detect a cyclic reference to alias.
@@ -1316,8 +1324,6 @@ def test_csv_definition():
 @pytest.mark.s3select
 def test_schema_definition():
 
-    ## TEMP : RGW may return error-status that it is not handled by this test
-    return
     number_of_rows = 10000
 
     # purpose of test is to validate functionality using csv header info
@@ -1333,7 +1339,6 @@ def test_schema_definition():
 
     # using the scheme on first line, query is using the attach schema
     res_use = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select c1,c3 from s3object;",csv_header_info="USE") ).replace("\n","")
-    
     # result of both queries should be the same
     s3select_assert_result( res_ignore, res_use)