# Bento (Benthos) stream configuration.
# Flow: receive an S3 event notification -> extract bucket/key ->
# fetch the object from S3 -> invoke an AWS Lambda on its content ->
# write the processed result back to S3.
pipeline:
  processors:
    # Step 1: Log the raw S3 event notification as received.
    - log:
        level: INFO
        message: 'Raw S3 event received: ${! json() }'

    # Step 2: Extract bucket name and object key from the first event record.
    # NOTE(review): S3 event notifications URL-encode object keys (e.g. spaces
    # become '+'); if keys may contain special characters, apply .uri_decode()
    # to the key here — TODO confirm against a real event payload.
    - mapping: |
        root.bucket_name = this.Records.index(0).s3.bucket.name
        root.file_key = this.Records.index(0).s3.object.key
    - log:
        level: INFO
        message: 'Extracted Bucket Name: ${! json("bucket_name") }'
    - log:
        level: INFO
        message: 'Extracted File Key: ${! json("file_key") }'

    # Step 3: Fetch the referenced object's content from S3.
    # The message payload is replaced with the file body after this step,
    # so the bucket_name/file_key JSON fields are no longer available.
    - aws_s3:
        bucket: '${! json("bucket_name") }'
        key: '${! json("file_key") }'
    - log:
        level: INFO
        message: 'Actual file content fetched: ${! content() }'

    # Step 4: Pass the file content to Lambda for processing.
    # result_map belongs to the branch processor (not aws_lambda): it maps
    # the branch result back onto the message, raising when the invocation
    # reported an error via the lambda_function_error metadata key.
    - branch:
        processors:
          - aws_lambda:
              function: bentodboperation  # Replace with your Lambda function name
              parallel: false  # Execute sequentially; set true for parallel invocations
        result_map: |
          root = if meta().exists("lambda_function_error") {
            throw("Invocation failed due to %v: %v".format(this.errorType, this.errorMessage))
          } else {
            this # If no error, keep the message unchanged
          }
output:
  aws_s3:
    bucket: bento-bucket-new  # Replace with your output S3 bucket name
    # Dynamic object key built from fields expected in the Lambda output
    # payload — presumably custNo/distrNationCode are set by the Lambda;
    # verify against its response shape.
    path: 'output-files/${! json("custNo") }_${! json("distrNationCode") }.json'
    region: eu-west-1  # Region where the output bucket is located
    content_type: application/json
    # Output-level processors run on each message before it is written,
    # so this logs the payload about to be uploaded.
    processors:
      - log:
          level: INFO
          message: 'Processed output written to S3: ${! json() }'