Hi Team, I am getting a validation error when I run the `./dataflow-runner up --emr-config cluster.json` command.
cluster.json:
{
"schema": "iglu:com.snowplowanalytics.dataflowrunner/ClusterConfig/avro/1-1-0",
"data": {
"name": "RDB Shredder",
"logUri": "s3://rr-snowplow-events-sample-app-dev/emr-logs/",
"region":"us-east-1",
"credentials": {
"accessKeyId": "xxxxxxxxxxxxxxxxxxxxxxxxx",
"secretAccessKey": "xxxxxxxxxxxxxxxxxxxxxxxxxx"
},
"roles": {
"jobflow": "EMR_EC2_DefaultRole",
"service": "EMR_DefaultRole"
},
"ec2": {
"amiVersion": "6.2.0",
"keyName": "snowplow_dev.pem",
"location": {
"vpc": {
"subnetId": "subnet-xxxxxxxx"
}
},
"instances": {
"master": {
"type": "m4.large",
"ebsConfiguration": {
"ebsOptimized": true,
"ebsBlockDeviceConfigs": [
]
}
},
"core": {
"type": "r4.xlarge",
"count": 1
},
"task": {
"type": "m4.large",
"count": 0,
"bid": "0.015"
}
}
},
"tags": [ ],
"bootstrapActionConfigs": [ ],
"configurations": [
{
"classification":"core-site",
"properties":{
"Io.file.buffer.size":"65536"
},
"configurations":[
]
},
{
"classification":"yarn-site",
"properties":{
"yarn.nodemanager.resource.memory-mb":"57344",
"yarn.scheduler.maximum-allocation-mb":"57344",
"yarn.nodemanager.vmem-check-enabled":"false"
},
"configurations":[
]
},
{
"classification":"spark",
"properties":{
"maximizeResourceAllocation":"false"
},
"configurations":[
]
},
{
"classification":"spark-defaults",
"properties":{
"spark.executor.memory":"7G",
"spark.driver.memory":"7G",
"spark.driver.cores":"3",
"spark.yarn.driver.memoryOverhead":"1024",
"spark.default.parallelism":"24",
"spark.executor.cores":"1",
"spark.executor.instances":"6",
"spark.yarn.executor.memoryOverhead":"1024",
"spark.dynamicAllocation.enabled":"false"
},
"configurations":[
]
}
],
"applications": [ "Hadoop", "Spark" ]
}
}