I resolved this issue using AWS Glue PySpark: first publish the notification to SNS, then write the DynamicFrame to Redshift with a TRUNCATE post-action.
# Publish the notification to SNS (the topic ARN is shown as a placeholder).
import boto3

sns = boto3.client('sns')
sns.publish(
    TopicArn='arn:aws:sns:us-west-2:xxxxxxxxx',
    Subject="Description - subject line",
    Message=data
)
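Here `data` is just the message string built earlier in the job. As a minimal sketch of one way it might be constructed, assuming the goal is to report the record count of the incoming DynamicFrame (the variable names are illustrative, not part of the original job):

# Hypothetical example: build the SNS message from the source DynamicFrame's row count.
record_count = datasource0.count()
data = "Glue job finished: {} records ready to load into Redshift.".format(record_count)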
# Write the DynamicFrame to Redshift through the Glue catalog connection.
# The postactions SQL runs in Redshift after the write completes;
# a preactions key is also supported if the table should be emptied before loading.
datasink5 = glueContext.write_dynamic_frame.from_jdbc_conf(
    frame=datasource0,
    catalog_connection="connection_name",
    connection_options={
        "dbtable": "schema.table",
        "database": "db_name",
        "postactions": "TRUNCATE TABLE schema.table_name;"
    },
    redshift_tmp_dir=args["TempDir"],
    transformation_ctx="datasink5"
)
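For completeness, the snippets above assume the standard Glue job boilerplate around them. A minimal sketch of that setup, with the catalog database and table names as placeholders:

import sys
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job

# Standard Glue job initialization; TempDir is passed as a job parameter.
args = getResolvedOptions(sys.argv, ['JOB_NAME', 'TempDir'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)

# Source DynamicFrame read from the Glue Data Catalog (placeholder names).
datasource0 = glueContext.create_dynamic_frame.from_catalog(
    database="db_name",
    table_name="source_table",
    transformation_ctx="datasource0"
)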