Skip to content

Commit 62696d4

Browse files
author
AWS
committed
AWS Clean Rooms Service Update: Add support for configurable compute sizes for PySpark jobs.
1 parent 5fcf5b0 commit 62696d4

File tree

2 files changed

+57
-1
lines changed

2 files changed

+57
-1
lines changed
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"type": "feature",
3+
"category": "AWS Clean Rooms Service",
4+
"contributor": "",
5+
"description": "Add support for configurable compute sizes for PySpark jobs."
6+
}

services/cleanrooms/src/main/resources/codegen-resources/service-2.json

Lines changed: 51 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8604,6 +8604,10 @@
86048604
"error":{
86058605
"shape":"ProtectedJobError",
86068606
"documentation":"<p> The error from the protected job.</p>"
8607+
},
8608+
"computeConfiguration":{
8609+
"shape":"ProtectedJobComputeConfiguration",
8610+
"documentation":"<p>The compute configuration for the protected job.</p>"
86078611
}
86088612
},
86098613
"documentation":"<p>The parameters for a Clean Rooms protected job.</p>"
@@ -8612,6 +8616,17 @@
86128616
"type":"string",
86138617
"enum":["DIRECT_ANALYSIS"]
86148618
},
8619+
"ProtectedJobComputeConfiguration":{
8620+
"type":"structure",
8621+
"members":{
8622+
"worker":{
8623+
"shape":"ProtectedJobWorkerComputeConfiguration",
8624+
"documentation":"<p>The worker configuration for the compute environment.</p>"
8625+
}
8626+
},
8627+
"documentation":"<p>The configuration of the compute resources for a PySpark job.</p>",
8628+
"union":true
8629+
},
86158630
"ProtectedJobConfigurationDetails":{
86168631
"type":"structure",
86178632
"members":{
@@ -8925,6 +8940,37 @@
89258940
"type":"string",
89268941
"enum":["PYSPARK"]
89278942
},
8943+
"ProtectedJobWorkerComputeConfiguration":{
8944+
"type":"structure",
8945+
"required":[
8946+
"type",
8947+
"number"
8948+
],
8949+
"members":{
8950+
"type":{
8951+
"shape":"ProtectedJobWorkerComputeType",
8952+
"documentation":"<p>The worker compute configuration type.</p>"
8953+
},
8954+
"number":{
8955+
"shape":"ProtectedJobWorkerComputeConfigurationNumberInteger",
8956+
"documentation":"<p>The number of workers for a PySpark job.</p>"
8957+
}
8958+
},
8959+
"documentation":"<p>The configuration of the compute resources for a PySpark job.</p>"
8960+
},
8961+
"ProtectedJobWorkerComputeConfigurationNumberInteger":{
8962+
"type":"integer",
8963+
"box":true,
8964+
"max":128,
8965+
"min":4
8966+
},
8967+
"ProtectedJobWorkerComputeType":{
8968+
"type":"string",
8969+
"enum":[
8970+
"CR.1X",
8971+
"CR.4X"
8972+
]
8973+
},
89288974
"ProtectedQuery":{
89298975
"type":"structure",
89308976
"required":[
@@ -9901,6 +9947,10 @@
99019947
"resultConfiguration":{
99029948
"shape":"ProtectedJobResultConfigurationInput",
99039949
"documentation":"<p>The details needed to write the job results.</p>"
9950+
},
9951+
"computeConfiguration":{
9952+
"shape":"ProtectedJobComputeConfiguration",
9953+
"documentation":"<p>The compute configuration for the protected job.</p>"
99049954
}
99059955
}
99069956
},
@@ -10655,7 +10705,7 @@
1065510705
},
1065610706
"number":{
1065710707
"shape":"WorkerComputeConfigurationNumberInteger",
10658-
"documentation":"<p> The number of workers.</p>"
10708+
"documentation":"<p> The number of workers.</p> <p>SQL queries support a minimum value of 2 and a maximum value of 400. </p> <p>PySpark jobs support a minimum value of 4 and a maximum value of 128.</p>"
1065910709
}
1066010710
},
1066110711
"documentation":"<p> The configuration of the compute resources for workers running an analysis with the Clean Rooms SQL analytics engine.</p>"

0 commit comments

Comments (0)