git.apps.os.sepia.ceph.com Git - s3-tests.git/commitdiff
add handling for EventStreamError exception
author     Gal Salomon <gal.salomon@gmail.com>
           Wed, 17 Apr 2024 15:20:37 +0000 (18:20 +0300)
committer  Gal Salomon <gal.salomon@gmail.com>
           Wed, 19 Feb 2025 18:48:34 +0000 (20:48 +0200)
Signed-off-by: Gal Salomon <gal.salomon@gmail.com>
(cherry picked from commit 77f1334571416e110d27f574c7f563d8c9873d9b)

s3tests_boto3/functional/test_s3select.py

index bda6ff77b411e188ea620b7146f6efed1d620837..b8533f9bde3237483f3fdbd86afc1789fa8d10df 100644 (file)
@@ -4,6 +4,7 @@ import string
 import re
 import json
 from botocore.exceptions import ClientError
+from botocore.exceptions import EventStreamError
 
 import uuid
 
@@ -276,6 +277,7 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
     s3 = get_client()
     result = ""
     result_status = {}
+
     try:
         r = s3.select_object_content(
         Bucket=bucket,
@@ -291,26 +293,34 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
         return result
 
     if progress == False:
-        for event in r['Payload']:
-            if 'Records' in event:
-                records = event['Records']['Payload'].decode('utf-8')
-                result += records
+
+        try:
+            for event in r['Payload']:
+                if 'Records' in event:
+                    records = event['Records']['Payload'].decode('utf-8')
+                    result += records
+
+        except EventStreamError as c:
+            result = str(c)
+            return result
+        
     else:
-        result = []
-        max_progress_scanned = 0
-        for event in r['Payload']:
-            if 'Records' in event:
-                records = event['Records']
-                result.append(records.copy())
-            if 'Progress' in event:
-                if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned):
-                    max_progress_scanned = event['Progress']['Details']['BytesScanned']
-                    result_status['Progress'] = event['Progress']
-
-            if 'Stats' in event:
-                result_status['Stats'] = event['Stats']
-            if 'End' in event:
-                result_status['End'] = event['End']
+            result = []
+            max_progress_scanned = 0
+            for event in r['Payload']:
+                if 'Records' in event:
+                    records = event['Records']
+                    result.append(records.copy())
+                if 'Progress' in event:
+                    if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned):
+                        max_progress_scanned = event['Progress']['Details']['BytesScanned']
+                        result_status['Progress'] = event['Progress']
+
+                if 'Stats' in event:
+                    result_status['Stats'] = event['Stats']
+                if 'End' in event:
+                    result_status['End'] = event['End']
+
 
     if progress == False:
         return result
@@ -1309,7 +1319,6 @@ def test_schema_definition():
 
 # using the schema on the first line; the query uses the attached schema
     res_use = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select c1,c3 from s3object;",csv_header_info="USE") ).replace("\n","")
-    
     # result of both queries should be the same
     s3select_assert_result( res_ignore, res_use)
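
A minimal, self-contained sketch of the pattern this patch introduces (the bucket, key, and endpoint below are hypothetical, not taken from the test suite). Errors that RGW reports after the select response stream has started are raised as EventStreamError while iterating the payload, not as a ClientError on the initial call, so the helper converts them into a string result instead of letting the test abort:

import boto3
from botocore.exceptions import ClientError, EventStreamError

# hypothetical RGW endpoint for illustration only
s3 = boto3.client('s3', endpoint_url='http://localhost:8000')

def select_or_error(bucket, key, query):
    # Return the concatenated records, or the error text if the request
    # fails up front (ClientError) or mid-stream (EventStreamError).
    try:
        r = s3.select_object_content(
            Bucket=bucket, Key=key,
            Expression=query, ExpressionType='SQL',
            InputSerialization={'CSV': {}, 'CompressionType': 'NONE'},
            OutputSerialization={'CSV': {}})
    except ClientError as e:
        return str(e)

    result = ""
    try:
        for event in r['Payload']:
            if 'Records' in event:
                result += event['Records']['Payload'].decode('utf-8')
    except EventStreamError as e:
        # mid-stream failures surface here, mirroring the patched run_s3select
        return str(e)
    return result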