import random
import string
import re
+import json
from botocore.exceptions import ClientError
import uuid
epsilon = float(0.00001)
# both results should be close (within epsilon)
- assert( abs(float(res.split("\n")[1]) - eval(e)) < epsilon )
+ assert( abs(float(res.split("\n")[0]) - eval(e)) < epsilon )
@pytest.mark.s3select
def get_random_string():
single_line_csv = create_random_csv_object(1,1)
bucket_name = get_new_bucket_name()
obj_name = get_random_string() #"single_line_csv.csv"
- upload_csv_object(bucket_name,obj_name,single_line_csv)
+ upload_object(bucket_name,obj_name,single_line_csv)
for _ in range(100):
generate_s3select_where_clause(bucket_name,obj_name)
single_line_csv = create_random_csv_object(1,1)
bucket_name = get_new_bucket_name()
obj_name = get_random_string() #"single_line_csv.csv"
- upload_csv_object(bucket_name,obj_name,single_line_csv)
+ upload_object(bucket_name,obj_name,single_line_csv)
for _ in range(100):
generate_s3select_expression_projection(bucket_name,obj_name)
return result
-def upload_csv_object(bucket_name,new_key,obj):
+def create_random_json_object(rows,columns,col_delim=",",record_delim="\n",csv_schema=""):
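+ # build a JSON document of the form {"root": [ {"c1": <int>, ..., "cN": <int>}, ... ]}
+ # with `rows` records, each holding `columns` random integer fields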
+ result = "{\"root\" : ["
+ result += record_delim
+ if len(csv_schema)>0 :
+ result = csv_schema + record_delim
+
+ for _ in range(rows):
+ row = ""
+ num = 0
+ row += "{"
+ for _ in range(columns):
+ num += 1
+ row = row + "\"c" + str(num) + "\"" + ": " "{}{}".format(random.randint(0,1000),col_delim)
+ row = row[:-1]
+ row += "}"
+ row += ","
+ result += row + record_delim
+
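+ # drop the trailing "," and record delimiter left after the last record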
+ result = result[:-2]
+ result += record_delim
+ result += "]" + "}"
+
+ return result
+
+def csv_to_json(obj, field_split=",",row_split="\n",csv_schema=""):
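+ # wrap a CSV object into a JSON document {"root": [...]}, naming the fields of each record c1..cN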
+ result = "{\"root\" : ["
+ result += row_split
+ if len(csv_schema)>0 :
+ result = csv_schema + row_split
+
+ for rec in obj.split(row_split):
+ row = ""
+ num = 0
+ row += "{"
+ for col in rec.split(field_split):
+ if col == "":
+ break
+ num += 1
+ row = row + "\"c" + str(num) + "\"" + ": " "{}{}".format(col,field_split)
+ row = row[:-1]
+ row += "}"
+ row += ","
+ result += row + row_split
+
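+ # drop the "," and delimiter of the last real record plus the "}," and delimiter produced
+ # by the empty element after the trailing row delimiter (5 chars; assumes obj ends with row_split)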
+ result = result[:-5]
+ result += row_split
+ result += "]" + "}"
+
+ return result
+
+def upload_object(bucket_name,new_key,obj):
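+ # renamed from upload_csv_object: the helper uploads any payload (CSV or JSON), not only CSV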
client = get_client()
client.create_bucket(Bucket=bucket_name)
s3 = get_client()
result = ""
+ result_status = {}
try:
r = s3.select_object_content(
Bucket=bucket,
result += records
else:
result = []
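+ # keep only the furthest Progress event (largest BytesScanned) in result_status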
+ max_progress_scanned = 0
for event in r['Payload']:
if 'Records' in event:
records = event['Records']
result.append(records.copy())
if 'Progress' in event:
- progress = event['Progress']
- result.append(progress.copy())
+ if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned):
+ max_progress_scanned = event['Progress']['Details']['BytesScanned']
+ result_status['Progress'] = event['Progress']
+
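+ # Stats and End events go into result_status instead of the record list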
if 'Stats' in event:
- stats = event['Stats']
- result.append(stats.copy())
+ result_status['Stats'] = event['Stats']
if 'End' in event:
- end = event['End']
- result.append(end.copy())
- return result
+ result_status['End'] = event['End']
+
+ if not progress:
+ return result
+ else:
+ return result,result_status
+
def run_s3select_output(bucket,key,query, quot_field, op_column_delim = ",", op_row_delim = "\n", column_delim=",", op_quot_char = '"', op_esc_char = '\\', row_delim="\n",quot_char='"',esc_char='\\',csv_header_info="NONE"):
s3 = get_client()
return result
+def run_s3select_json(bucket,key,query, op_row_delim = "\n"):
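+ # run an s3select query against a JSON object: the input is parsed as a JSON DOCUMENT
+ # and the output is serialized as JSON records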
+
+ s3 = get_client()
+
+ r = s3.select_object_content(
+ Bucket=bucket,
+ Key=key,
+ ExpressionType='SQL',
+ InputSerialization = {"JSON": {"Type": "DOCUMENT"}},
+ OutputSerialization = {"JSON": {}},
+ Expression=query,)
+ # the record delimiter is optional in the output serialization
+
+ result = ""
+ for event in r['Payload']:
+ if 'Records' in event:
+ records = event['Records']['Payload'].decode('utf-8')
+ result += records
+
+ return result
+
def remove_xml_tags_from_result(obj):
result = ""
for rec in obj.split("\n"):
bucket_name = get_new_bucket_name()
num_of_rows = 1234
obj_to_load = create_random_csv_object(num_of_rows,10)
- upload_csv_object(bucket_name,csv_obj_name,obj_to_load)
+ upload_object(bucket_name,csv_obj_name,obj_to_load)
res = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object;") ).replace(",","")
s3select_assert_result( num_of_rows, int( res ))
+@pytest.mark.s3select
+def test_count_json_operation():
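+ # count(0) over the whole document and over the root array, for one record and for three records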
+ json_obj_name = get_random_string()
+ bucket_name = get_new_bucket_name()
+ num_of_rows = 1
+ obj_to_load = create_random_json_object(num_of_rows,10)
+ upload_object(bucket_name,json_obj_name,obj_to_load)
+ res = remove_xml_tags_from_result(run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*];"))
+ s3select_assert_result( 1, int(res))
+
+ res = remove_xml_tags_from_result(run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root;"))
+ s3select_assert_result( 1, int(res))
+
+ obj_to_load = create_random_json_object(3,10)
+ upload_object(bucket_name,json_obj_name,obj_to_load)
+ res = remove_xml_tags_from_result(run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root;"))
+ s3select_assert_result( 3, int(res))
+
+@pytest.mark.s3select
+def test_json_column_sum_min_max():
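+ # convert a random CSV object to JSON and validate min/max/sum/avg computed by s3select
+ # against the same values computed client-side from the CSV source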
+ csv_obj = create_random_csv_object(10000,10)
+
+ json_obj = csv_to_json(csv_obj)
+
+ json_obj_name = get_random_string()
+ bucket_name = get_new_bucket_name()
+ upload_object(bucket_name,json_obj_name,json_obj)
+
+ json_obj_name_2 = get_random_string()
+ bucket_name_2 = "testbuck2"
+ upload_object(bucket_name_2,json_obj_name_2,json_obj)
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select min(_1.c1) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 1 , csv_obj )
+ res_target = min( list_int )
+
+ s3select_assert_result( int(res_s3select), int(res_target))
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select min(_1.c4) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 4 , csv_obj )
+ res_target = min( list_int )
+
+ s3select_assert_result( int(res_s3select), int(res_target))
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select avg(_1.c6) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 6 , csv_obj )
+ res_target = float(sum(list_int ))/10000
+
+ s3select_assert_result( float(res_s3select), float(res_target))
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select max(_1.c4) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 4 , csv_obj )
+ res_target = max( list_int )
+
+ s3select_assert_result( int(res_s3select), int(res_target))
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select max(_1.c7) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 7 , csv_obj )
+ res_target = max( list_int )
+
+ s3select_assert_result( int(res_s3select), int(res_target))
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select sum(_1.c4) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 4 , csv_obj )
+ res_target = sum( list_int )
+
+ s3select_assert_result( int(res_s3select), int(res_target))
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select sum(_1.c7) from s3object[*].root;") ).replace(",","")
+ list_int = create_list_of_int( 7 , csv_obj )
+ res_target = sum( list_int )
+
+ s3select_assert_result( int(res_s3select) , int(res_target) )
+
+ # the following queries validate, on *random* input, an *accurate* relation between the condition result, the sum operation and the count operation.
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name_2,json_obj_name_2,"select count(0),sum(_1.c1),sum(_1.c2) from s3object[*].root where (_1.c1-_1.c2) = 2;" ) )
+ count,sum1,sum2 = res_s3select.split(",")
+
+ s3select_assert_result( int(count)*2 , int(sum1)-int(sum2 ) )
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0),sum(_1.c1),sum(_1.c2) from s3object[*].root where (_1.c1-_1.c2) = 4;" ) )
+ count,sum1,sum2 = res_s3select.split(",")
+
+ s3select_assert_result( int(count)*4 , int(sum1)-int(sum2) )
+
+@pytest.mark.s3select
+def test_json_nullif_expressions():
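+ # "nullif(_1.c1,_1.c2) is null" must agree with "_1.c1 = _1.c2" (and the negated forms with !=)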
+
+ json_obj = create_random_json_object(10000,10)
+
+ json_obj_name = get_random_string()
+ bucket_name = get_new_bucket_name()
+ upload_object(bucket_name,json_obj_name,json_obj)
+
+ res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where nullif(_1.c1,_1.c2) is null ;") ).replace("\n","")
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where _1.c1 = _1.c2 ;") ).replace("\n","")
+
+ s3select_assert_result( res_s3select_nullif, res_s3select)
+
+ res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select (nullif(_1.c1,_1.c2) is null) from s3object[*].root ;") ).replace("\n","")
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select (_1.c1 = _1.c2) from s3object[*].root ;") ).replace("\n","")
+
+ s3select_assert_result( res_s3select_nullif, res_s3select)
+
+ res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where not nullif(_1.c1,_1.c2) is null ;") ).replace("\n","")
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where _1.c1 != _1.c2 ;") ).replace("\n","")
+
+ s3select_assert_result( res_s3select_nullif, res_s3select)
+
+ res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select (nullif(_1.c1,_1.c2) is not null) from s3object[*].root ;") ).replace("\n","")
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select (_1.c1 != _1.c2) from s3object[*].root ;") ).replace("\n","")
+
+ s3select_assert_result( res_s3select_nullif, res_s3select)
+
+ res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where nullif(_1.c1,_1.c2) = _1.c1 ;") ).replace("\n","")
+
+ res_s3select = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where _1.c1 != _1.c2 ;") ).replace("\n","")
+
+ s3select_assert_result( res_s3select_nullif, res_s3select)
+
+
@pytest.mark.s3select
def test_column_sum_min_max():
csv_obj = create_random_csv_object(10000,10)
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
csv_obj_name_2 = get_random_string()
bucket_name_2 = "testbuck2"
- upload_csv_object(bucket_name_2,csv_obj_name_2,csv_obj)
+ upload_object(bucket_name_2,csv_obj_name_2,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select min(int(_1)) from s3object;") ).replace(",","")
list_int = create_list_of_int( 1 , csv_obj )
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object where nullif(_1,_2) is null ;") ).replace("\n","")
csv_obj = create_random_csv_object_null(10000,10)
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(*) from s3object where nullif(_1,null) is null;") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object where (nullif(_1,_2) is null) = true ;") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_null = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(*) from s3object where nullif(_1,_2) is not null ;") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from s3object ;') ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from s3object where int(_1) in(1);')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from s3object where (int(_1) in(1)) = true;')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where _1 like "%aeio%";')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where (_1 like "%aeio%") = true;')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where nullif(_1,_2) is null ;") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from stdin ;') ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where int(_1) in(1);')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "%aeio%";')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select min(int(_1)),max(int(_2)),min(int(_3))+1 from s3object;")).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1, int(_2) as a2 , (a1+a2) as a3 from s3object where a3>100 and a3<300;") ).replace(",","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1,int(_2) as a2, a1+a4 as a3, a5+a1 as a4, int(_3)+a3 as a5 from s3object;") )
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_date_time = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(0) from s3object where extract(year from to_timestamp(_1)) > 1950 and extract(year from to_timestamp(_1)) < 1960;') )
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_date_time = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(0) from s3object where (extract(year from to_timestamp(_1)) > 1950) = true and (extract(year from to_timestamp(_1)) < 1960) = true;') )
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
# returned values contain a comma {,}
res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select _6 from s3object;") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
# the purpose of this test is to correctly parse input with different csv definitions
res = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object;","|","\t") ).replace(",","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
# ignore the schema on the first line and retrieve columns by generic column number
res_ignore = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select _1,_3 from s3object;",csv_header_info="IGNORE") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case when cast(_1 as int)>100 and cast(_1 as int)<200 then "(100-200)" when cast(_1 as int)>200 and cast(_1 as int)<300 then "(200-300)" else "NONE" end from s3object;') ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where char_length(_3)>2 and char_length(_4)>2 and cast(substring(_3,1,2) as int) = cast(substring(_4,1,2) as int);') ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where cast(_3 as int)>999;') ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_version = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select version() from s3object;") ).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_trim = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where trim(_1) = "aeiou";')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_trim = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where trim(_1) = "aeiou" = true;')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_escape = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where _1 like "%_ar" escape "%";')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_case = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case cast(_1 as int) when cast(_2 as int) then "case_1_1" else "case_2_2" end from s3object;')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_cast = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where cast(int(_1) as bool) = true ;')).replace("\n","")
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
obj_size = len(csv_obj.encode('utf-8'))
- res_s3select_response = run_s3select(bucket_name,csv_obj_name,"select sum(int(_1)) from s3object;",progress = True)
- records_payload_size = len(remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select sum(int(_1)) from s3object;')).replace("\n",""))
+ result_status = {}
+ result_size = 0
- total_response = len(res_s3select_response)
-
+ res_s3select_response,result_status = run_s3select(bucket_name,csv_obj_name,"select sum(int(_1)) from s3object;",progress = True)
+
+ for rec in res_s3select_response:
+ result_size += len(rec['Payload'])
+
+ records_payload_size = result_size
+
# TODO: validate bytes processed once compressed data is supported
- s3select_assert_result(obj_size, res_s3select_response[total_response-3]['Details']['BytesScanned'])
- s3select_assert_result(records_payload_size, res_s3select_response[total_response-3]['Details']['BytesReturned'])
+ s3select_assert_result(obj_size, result_status['Progress']['Details']['BytesScanned'])
+ s3select_assert_result(records_payload_size, result_status['Progress']['Details']['BytesReturned'])
# stats response payload validation
- s3select_assert_result(obj_size, res_s3select_response[total_response-2]['Details']['BytesScanned'])
- s3select_assert_result(records_payload_size, res_s3select_response[total_response-2]['Details']['BytesReturned'])
+ s3select_assert_result(obj_size, result_status['Stats']['Details']['BytesScanned'])
+ s3select_assert_result(records_payload_size, result_status['Stats']['Details']['BytesReturned'])
# end response
- s3select_assert_result({}, res_s3select_response[total_response-1])
+ s3select_assert_result({}, result_status['End'])
@pytest.mark.s3select
def test_output_serial_expressions():
csv_obj_name = get_random_string()
bucket_name = get_new_bucket_name()
- upload_csv_object(bucket_name,csv_obj_name,csv_obj)
+ upload_object(bucket_name,csv_obj_name,csv_obj)
res_s3select_1 = remove_xml_tags_from_result( run_s3select_output(bucket_name,csv_obj_name,"select _1, _2 from s3object where nullif(_1,_2) is null ;", "ALWAYS") ).replace("\n",",").replace(",","")