blob: 79ecb99a601d7e4fb1e0ed871a840b08827a1cd8 [file] [log] [blame] [edit]
<html><body>
<style>
body, h1, h2, h3, div, span, p, pre, a {
margin: 0;
padding: 0;
border: 0;
font-weight: inherit;
font-style: inherit;
font-size: 100%;
font-family: inherit;
vertical-align: baseline;
}
body {
font-size: 13px;
padding: 1em;
}
h1 {
font-size: 26px;
margin-bottom: 1em;
}
h2 {
font-size: 24px;
margin-bottom: 1em;
}
h3 {
font-size: 20px;
margin-bottom: 1em;
margin-top: 1em;
}
pre, code {
line-height: 1.5;
font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
}
pre {
margin-top: 0.5em;
}
h1, h2, h3, p {
font-family: Arial, sans-serif;
}
h1, h2, h3 {
border-bottom: solid #CCC 1px;
}
.toc_element {
margin-top: 0.5em;
}
.firstline {
margin-left: 2em;
}
.method {
margin-top: 1em;
border: solid 1px #CCC;
padding: 1em;
background: #EEE;
}
.details {
font-weight: bold;
font-size: 14px;
}
</style>
<h1><a href="dataproc_v1.html">Cloud Dataproc API</a> . <a href="dataproc_v1.projects.html">projects</a> . <a href="dataproc_v1.projects.locations.html">locations</a> . <a href="dataproc_v1.projects.locations.batches.html">batches</a> . <a href="dataproc_v1.projects.locations.batches.sparkApplications.html">sparkApplications</a></h1>
<h2>Instance Methods</h2>
<p class="toc_element">
<code><a href="#access">access(name, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain high level information corresponding to a single Spark Application.</p>
<p class="toc_element">
<code><a href="#accessEnvironmentInfo">accessEnvironmentInfo(name, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain environment details for a Spark Application</p>
<p class="toc_element">
<code><a href="#accessJob">accessJob(name, jobId=None, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to a spark job for a Spark Application.</p>
<p class="toc_element">
<code><a href="#accessSqlPlan">accessSqlPlan(name, executionId=None, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain Spark Plan Graph for a Spark Application SQL execution. Limits the number of clusters returned as part of the graph to 10000.</p>
<p class="toc_element">
<code><a href="#accessSqlQuery">accessSqlQuery(name, details=None, executionId=None, parent=None, planDescription=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to a particular SQL Query for a Spark Application.</p>
<p class="toc_element">
<code><a href="#accessStageAttempt">accessStageAttempt(name, parent=None, stageAttemptId=None, stageId=None, summaryMetricsMask=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to a spark stage attempt for a Spark Application.</p>
<p class="toc_element">
<code><a href="#accessStageRddGraph">accessStageRddGraph(name, parent=None, stageId=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain RDD operation graph for a Spark Application Stage. Limits the number of clusters returned as part of the graph to 10000.</p>
<p class="toc_element">
<code><a href="#close">close()</a></code></p>
<p class="firstline">Close httplib2 connections.</p>
<p class="toc_element">
<code><a href="#search">search(parent, applicationStatus=None, maxEndTime=None, maxTime=None, minEndTime=None, minTime=None, pageSize=None, pageToken=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain high level information and list of Spark Applications corresponding to a batch</p>
<p class="toc_element">
<code><a href="#searchExecutorStageSummary">searchExecutorStageSummary(name, pageSize=None, pageToken=None, parent=None, stageAttemptId=None, stageId=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain executor summary with respect to a spark stage attempt.</p>
<p class="toc_element">
<code><a href="#searchExecutorStageSummary_next">searchExecutorStageSummary_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#searchExecutors">searchExecutors(name, executorStatus=None, pageSize=None, pageToken=None, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to executors for a Spark Application.</p>
<p class="toc_element">
<code><a href="#searchExecutors_next">searchExecutors_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#searchJobs">searchJobs(name, jobStatus=None, pageSize=None, pageToken=None, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain list of spark jobs corresponding to a Spark Application.</p>
<p class="toc_element">
<code><a href="#searchJobs_next">searchJobs_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#searchSqlQueries">searchSqlQueries(name, details=None, pageSize=None, pageToken=None, parent=None, planDescription=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to SQL Queries for a Spark Application.</p>
<p class="toc_element">
<code><a href="#searchSqlQueries_next">searchSqlQueries_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#searchStageAttemptTasks">searchStageAttemptTasks(name, pageSize=None, pageToken=None, parent=None, sortRuntime=None, stageAttemptId=None, stageId=None, taskStatus=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to tasks for a spark stage attempt for a Spark Application.</p>
<p class="toc_element">
<code><a href="#searchStageAttemptTasks_next">searchStageAttemptTasks_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#searchStageAttempts">searchStageAttempts(name, pageSize=None, pageToken=None, parent=None, stageId=None, summaryMetricsMask=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to a spark stage attempts for a Spark Application.</p>
<p class="toc_element">
<code><a href="#searchStageAttempts_next">searchStageAttempts_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#searchStages">searchStages(name, pageSize=None, pageToken=None, parent=None, stageStatus=None, summaryMetricsMask=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain data corresponding to stages for a Spark Application.</p>
<p class="toc_element">
<code><a href="#searchStages_next">searchStages_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#search_next">search_next()</a></code></p>
<p class="firstline">Retrieves the next page of results.</p>
<p class="toc_element">
<code><a href="#summarizeExecutors">summarizeExecutors(name, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain summary of Executor Summary for a Spark Application</p>
<p class="toc_element">
<code><a href="#summarizeJobs">summarizeJobs(name, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain summary of Jobs for a Spark Application</p>
<p class="toc_element">
<code><a href="#summarizeStageAttemptTasks">summarizeStageAttemptTasks(name, parent=None, stageAttemptId=None, stageId=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain summary of Tasks for a Spark Application Stage Attempt</p>
<p class="toc_element">
<code><a href="#summarizeStages">summarizeStages(name, parent=None, x__xgafv=None)</a></code></p>
<p class="firstline">Obtain summary of Stages for a Spark Application</p>
<p class="toc_element">
<code><a href="#write">write(name, body=None, x__xgafv=None)</a></code></p>
<p class="firstline">Write wrapper objects from dataplane to spanner</p>
<h3>Method Details</h3>
<div class="method">
<code class="details" id="access">access(name, parent=None, x__xgafv=None)</code>
<pre>Obtain high level information corresponding to a single Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # A summary of Spark Application
&quot;application&quot;: { # High level information corresponding to an application. # Output only. High level information corresponding to an application.
&quot;applicationContextIngestionStatus&quot;: &quot;A String&quot;,
&quot;applicationId&quot;: &quot;A String&quot;,
&quot;attempts&quot;: [
{ # Specific attempt of an application.
&quot;appSparkVersion&quot;: &quot;A String&quot;,
&quot;attemptId&quot;: &quot;A String&quot;,
&quot;completed&quot;: True or False,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;endTime&quot;: &quot;A String&quot;,
&quot;lastUpdated&quot;: &quot;A String&quot;,
&quot;sparkUser&quot;: &quot;A String&quot;,
&quot;startTime&quot;: &quot;A String&quot;,
},
],
&quot;coresGranted&quot;: 42,
&quot;coresPerExecutor&quot;: 42,
&quot;maxCores&quot;: 42,
&quot;memoryPerExecutorMb&quot;: 42,
&quot;name&quot;: &quot;A String&quot;,
&quot;quantileDataStatus&quot;: &quot;A String&quot;,
},
}</pre>
</div>
<div class="method">
<code class="details" id="accessEnvironmentInfo">accessEnvironmentInfo(name, parent=None, x__xgafv=None)</code>
<pre>Obtain environment details for a Spark Application
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Environment details of a Spark Application.
&quot;applicationEnvironmentInfo&quot;: { # Details about the Environment that the application is running in. # Details about the Environment that the application is running in.
&quot;classpathEntries&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;hadoopProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricsProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;resourceProfiles&quot;: [
{ # Resource profile that contains information about all the resources required by executors and tasks.
&quot;executorResources&quot;: {
&quot;a_key&quot;: { # Resources used per executor used by the application.
&quot;amount&quot;: &quot;A String&quot;,
&quot;discoveryScript&quot;: &quot;A String&quot;,
&quot;resourceName&quot;: &quot;A String&quot;,
&quot;vendor&quot;: &quot;A String&quot;,
},
},
&quot;resourceProfileId&quot;: 42,
&quot;taskResources&quot;: {
&quot;a_key&quot;: { # Resources used per task created by the application.
&quot;amount&quot;: 3.14,
&quot;resourceName&quot;: &quot;A String&quot;,
},
},
},
],
&quot;runtime&quot;: {
&quot;javaHome&quot;: &quot;A String&quot;,
&quot;javaVersion&quot;: &quot;A String&quot;,
&quot;scalaVersion&quot;: &quot;A String&quot;,
},
&quot;sparkProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;systemProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
}</pre>
</div>
<div class="method">
<code class="details" id="accessJob">accessJob(name, jobId=None, parent=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to a spark job for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
jobId: string, Required. Job ID to fetch data for.
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Details of a particular job associated with Spark Application
&quot;jobData&quot;: { # Data corresponding to a spark job. # Output only. Data corresponding to a spark job.
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;jobGroup&quot;: &quot;A String&quot;,
&quot;jobId&quot;: &quot;A String&quot;,
&quot;killTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveStages&quot;: 42,
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numCompletedStages&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedStages&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numSkippedStages&quot;: 42,
&quot;numSkippedTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;skippedStages&quot;: [
42,
],
&quot;sqlExecutionId&quot;: &quot;A String&quot;,
&quot;stageIds&quot;: [
&quot;A String&quot;,
],
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
},
}</pre>
</div>
<div class="method">
<code class="details" id="accessSqlPlan">accessSqlPlan(name, executionId=None, parent=None, x__xgafv=None)</code>
<pre>Obtain Spark Plan Graph for a Spark Application SQL execution. Limits the number of clusters returned as part of the graph to 10000.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
executionId: string, Required. Execution ID
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # SparkPlanGraph for a Spark Application execution limited to maximum 10000 clusters.
&quot;sparkPlanGraph&quot;: { # A graph used for storing information of an executionPlan of DataFrame. # SparkPlanGraph for a Spark Application execution.
&quot;edges&quot;: [
{ # Represents a directed edge in the spark plan tree from child to parent.
&quot;fromId&quot;: &quot;A String&quot;,
&quot;toId&quot;: &quot;A String&quot;,
},
],
&quot;executionId&quot;: &quot;A String&quot;,
&quot;nodes&quot;: [
{ # Wrapper used to represent either a node or a cluster.
&quot;cluster&quot;: { # Represents a tree of spark plan.
&quot;desc&quot;: &quot;A String&quot;,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;name&quot;: &quot;A String&quot;,
&quot;nodes&quot;: [
# Object with schema name: SparkPlanGraphNodeWrapper
],
&quot;sparkPlanGraphClusterId&quot;: &quot;A String&quot;,
},
&quot;node&quot;: { # Represents a node in the spark plan tree.
&quot;desc&quot;: &quot;A String&quot;,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;name&quot;: &quot;A String&quot;,
&quot;sparkPlanGraphNodeId&quot;: &quot;A String&quot;,
},
},
],
},
}</pre>
</div>
<div class="method">
<code class="details" id="accessSqlQuery">accessSqlQuery(name, details=None, executionId=None, parent=None, planDescription=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to a particular SQL Query for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
details: boolean, Optional. Lists/ hides details of Spark plan nodes. True is set to list and false to hide.
executionId: string, Required. Execution ID
parent: string, Required. Parent (Batch) resource reference.
planDescription: boolean, Optional. Enables/ disables physical plan description on demand
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Details of a query for a Spark Application
&quot;executionData&quot;: { # SQL Execution Data # SQL Execution Data
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executionId&quot;: &quot;A String&quot;,
&quot;jobs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricValues&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricValuesIsNull&quot;: True or False,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;modifiedConfigs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;physicalPlanDescription&quot;: &quot;A String&quot;,
&quot;rootExecutionId&quot;: &quot;A String&quot;,
&quot;stages&quot;: [
&quot;A String&quot;,
],
&quot;submissionTime&quot;: &quot;A String&quot;,
},
}</pre>
</div>
<div class="method">
<code class="details" id="accessStageAttempt">accessStageAttempt(name, parent=None, stageAttemptId=None, stageId=None, summaryMetricsMask=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to a spark stage attempt for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
stageAttemptId: integer, Required. Stage Attempt ID
stageId: string, Required. Stage ID
summaryMetricsMask: string, Optional. The list of summary metrics fields to include. Empty list will default to skip all summary metrics fields. Example, if the response should include TaskQuantileMetrics, the request should have task_quantile_metrics in summary_metrics_mask field
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Stage Attempt for a Stage of a Spark Application
&quot;stageData&quot;: { # Data corresponding to a stage. # Output only. Data corresponding to a stage.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;executorMetricsDistributions&quot;: {
&quot;diskBytesSpilled&quot;: [
3.14,
],
&quot;failedTasks&quot;: [
3.14,
],
&quot;inputBytes&quot;: [
3.14,
],
&quot;inputRecords&quot;: [
3.14,
],
&quot;killedTasks&quot;: [
3.14,
],
&quot;memoryBytesSpilled&quot;: [
3.14,
],
&quot;outputBytes&quot;: [
3.14,
],
&quot;outputRecords&quot;: [
3.14,
],
&quot;peakMemoryMetrics&quot;: {
&quot;executorMetrics&quot;: [
{
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
],
&quot;quantiles&quot;: [
3.14,
],
},
&quot;quantiles&quot;: [
3.14,
],
&quot;shuffleRead&quot;: [
3.14,
],
&quot;shuffleReadRecords&quot;: [
3.14,
],
&quot;shuffleWrite&quot;: [
3.14,
],
&quot;shuffleWriteRecords&quot;: [
3.14,
],
&quot;succeededTasks&quot;: [
3.14,
],
&quot;taskTimeMillis&quot;: [
3.14,
],
},
&quot;executorSummary&quot;: {
&quot;a_key&quot;: { # Executor resources consumed by a stage.
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;inputBytes&quot;: &quot;A String&quot;,
&quot;inputRecords&quot;: &quot;A String&quot;,
&quot;isExcludedForStage&quot;: True or False,
&quot;killedTasks&quot;: 42,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputBytes&quot;: &quot;A String&quot;,
&quot;outputRecords&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;shuffleRead&quot;: &quot;A String&quot;,
&quot;shuffleReadRecords&quot;: &quot;A String&quot;,
&quot;shuffleWrite&quot;: &quot;A String&quot;,
&quot;shuffleWriteRecords&quot;: &quot;A String&quot;,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;succeededTasks&quot;: 42,
&quot;taskTimeMillis&quot;: &quot;A String&quot;,
},
},
&quot;failureReason&quot;: &quot;A String&quot;,
&quot;firstTaskLaunchedTime&quot;: &quot;A String&quot;,
&quot;isShufflePushEnabled&quot;: True or False,
&quot;jobIds&quot;: [
&quot;A String&quot;,
],
&quot;killedTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;locality&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveTasks&quot;: 42,
&quot;numCompleteTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;parentStageIds&quot;: [
&quot;A String&quot;,
],
&quot;peakExecutorMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;rddIds&quot;: [
&quot;A String&quot;,
],
&quot;resourceProfileId&quot;: 42,
&quot;schedulingPool&quot;: &quot;A String&quot;,
&quot;shuffleMergersCount&quot;: 42,
&quot;speculationSummary&quot;: { # Details of the speculation task when speculative execution is enabled.
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
},
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;stageMetrics&quot;: { # Stage Level Aggregated Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;stageInputMetrics&quot;: { # Metrics about the input read by the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;stageOutputMetrics&quot;: { # Metrics about the output written by the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;stageShuffleReadMetrics&quot;: { # Shuffle data read for the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;stageShufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;stageShuffleWriteMetrics&quot;: { # Shuffle data written for the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
&quot;taskQuantileMetrics&quot;: { # Summary metrics fields. These are included in response only if present in summary_metrics_mask field in request
&quot;diskBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;durationMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorRunTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;inputMetrics&quot;: {
&quot;bytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;jvmGcTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;memoryBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;outputMetrics&quot;: {
&quot;bytesWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;peakExecutionMemoryBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSerializationTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSize&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;schedulerDelayMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shuffleReadMetrics&quot;: {
&quot;fetchWaitTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesReadToDisk&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;mergedFetchFallbackCount&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;totalBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: {
&quot;writeBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
},
&quot;tasks&quot;: {
&quot;a_key&quot;: { # Data corresponding to tasks created by spark.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;attempt&quot;: 42,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: &quot;A String&quot;,
&quot;hasMetrics&quot;: True or False,
&quot;host&quot;: &quot;A String&quot;,
&quot;index&quot;: 42,
&quot;launchTime&quot;: &quot;A String&quot;,
&quot;partitionId&quot;: 42,
&quot;resultFetchStart&quot;: &quot;A String&quot;,
&quot;schedulerDelayMillis&quot;: &quot;A String&quot;,
&quot;speculative&quot;: True or False,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;status&quot;: &quot;A String&quot;,
&quot;taskId&quot;: &quot;A String&quot;,
&quot;taskLocality&quot;: &quot;A String&quot;,
&quot;taskMetrics&quot;: { # Executor Task Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;inputMetrics&quot;: { # Metrics about the input data read by the task.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputMetrics&quot;: { # Metrics about the data written by the task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;shuffleReadMetrics&quot;: { # Shuffle data read by the task.
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: { # Shuffle data written by task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
},
},
},
}</pre>
</div>
<div class="method">
<code class="details" id="accessStageRddGraph">accessStageRddGraph(name, parent=None, stageId=None, x__xgafv=None)</code>
<pre>Obtain RDD operation graph for a Spark Application Stage. Limits the number of clusters returned as part of the graph to 10000.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
stageId: string, Required. Stage ID
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # RDD operation graph for a Spark Application Stage limited to maximum 10000 clusters.
&quot;rddOperationGraph&quot;: { # Graph representing RDD dependencies. Consists of edges and a root cluster. # RDD operation graph for a Spark Application Stage.
&quot;edges&quot;: [
{ # A directed edge representing dependency between two RDDs.
&quot;fromId&quot;: 42,
&quot;toId&quot;: 42,
},
],
&quot;incomingEdges&quot;: [
{ # A directed edge representing dependency between two RDDs.
&quot;fromId&quot;: 42,
&quot;toId&quot;: 42,
},
],
&quot;outgoingEdges&quot;: [
{ # A directed edge representing dependency between two RDDs.
&quot;fromId&quot;: 42,
&quot;toId&quot;: 42,
},
],
&quot;rootCluster&quot;: { # A grouping of nodes representing higher level constructs (stage, job etc.).
&quot;childClusters&quot;: [
# Object with schema name: RddOperationCluster
],
&quot;childNodes&quot;: [
{ # A node in the RDD operation graph. Corresponds to a single RDD.
&quot;barrier&quot;: True or False,
&quot;cached&quot;: True or False,
&quot;callsite&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;nodeId&quot;: 42,
&quot;outputDeterministicLevel&quot;: &quot;A String&quot;,
},
],
&quot;name&quot;: &quot;A String&quot;,
&quot;rddClusterId&quot;: &quot;A String&quot;,
},
&quot;stageId&quot;: &quot;A String&quot;,
},
}</pre>
</div>
<div class="method">
<code class="details" id="close">close()</code>
<pre>Close httplib2 connections.</pre>
</div>
<div class="method">
<code class="details" id="search">search(parent, applicationStatus=None, maxEndTime=None, maxTime=None, minEndTime=None, minTime=None, pageSize=None, pageToken=None, x__xgafv=None)</code>
<pre>Obtain high level information and list of Spark Applications corresponding to a batch
Args:
parent: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID&quot; (required)
applicationStatus: string, Optional. Search only applications in the chosen state.
Allowed values
APPLICATION_STATUS_UNSPECIFIED -
APPLICATION_STATUS_RUNNING -
APPLICATION_STATUS_COMPLETED -
maxEndTime: string, Optional. Latest end timestamp to list.
maxTime: string, Optional. Latest start timestamp to list.
minEndTime: string, Optional. Earliest end timestamp to list.
minTime: string, Optional. Earliest start timestamp to list.
pageSize: integer, Optional. Maximum number of applications to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous SearchSparkApplications call. Provide this token to retrieve the subsequent page.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # A list of summary of Spark Applications
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent SearchSparkApplicationsRequest.
&quot;sparkApplications&quot;: [ # Output only. High level information corresponding to an application.
{ # A summary of Spark Application
&quot;application&quot;: { # High level information corresponding to an application. # Output only. High level information corresponding to an application.
&quot;applicationContextIngestionStatus&quot;: &quot;A String&quot;,
&quot;applicationId&quot;: &quot;A String&quot;,
&quot;attempts&quot;: [
{ # Specific attempt of an application.
&quot;appSparkVersion&quot;: &quot;A String&quot;,
&quot;attemptId&quot;: &quot;A String&quot;,
&quot;completed&quot;: True or False,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;endTime&quot;: &quot;A String&quot;,
&quot;lastUpdated&quot;: &quot;A String&quot;,
&quot;sparkUser&quot;: &quot;A String&quot;,
&quot;startTime&quot;: &quot;A String&quot;,
},
],
&quot;coresGranted&quot;: 42,
&quot;coresPerExecutor&quot;: 42,
&quot;maxCores&quot;: 42,
&quot;memoryPerExecutorMb&quot;: 42,
&quot;name&quot;: &quot;A String&quot;,
&quot;quantileDataStatus&quot;: &quot;A String&quot;,
},
&quot;name&quot;: &quot;A String&quot;, # Identifier. Name of the spark application
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchExecutorStageSummary">searchExecutorStageSummary(name, pageSize=None, pageToken=None, parent=None, stageAttemptId=None, stageId=None, x__xgafv=None)</code>
<pre>Obtain executor summary with respect to a spark stage attempt.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
pageSize: integer, Optional. Maximum number of executors to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous AccessSparkApplicationExecutorsList call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
stageAttemptId: integer, Required. Stage Attempt ID
stageId: string, Required. Stage ID
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # List of Executors associated with a Spark Application Stage.
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent SearchSparkApplicationExecutorsListRequest.
&quot;sparkApplicationStageExecutors&quot;: [ # Details about executors used by the application stage.
{ # Executor resources consumed by a stage.
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;inputBytes&quot;: &quot;A String&quot;,
&quot;inputRecords&quot;: &quot;A String&quot;,
&quot;isExcludedForStage&quot;: True or False,
&quot;killedTasks&quot;: 42,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputBytes&quot;: &quot;A String&quot;,
&quot;outputRecords&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;shuffleRead&quot;: &quot;A String&quot;,
&quot;shuffleReadRecords&quot;: &quot;A String&quot;,
&quot;shuffleWrite&quot;: &quot;A String&quot;,
&quot;shuffleWriteRecords&quot;: &quot;A String&quot;,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;succeededTasks&quot;: 42,
&quot;taskTimeMillis&quot;: &quot;A String&quot;,
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchExecutorStageSummary_next">searchExecutorStageSummary_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="searchExecutors">searchExecutors(name, executorStatus=None, pageSize=None, pageToken=None, parent=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to executors for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
  executorStatus: string, Optional. Filter to select whether active, dead, or all executors should be selected.
Allowed values
EXECUTOR_STATUS_UNSPECIFIED -
EXECUTOR_STATUS_ACTIVE -
EXECUTOR_STATUS_DEAD -
pageSize: integer, Optional. Maximum number of executors to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous AccessSparkApplicationExecutorsList call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # List of Executors associated with a Spark Application.
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent SearchSparkApplicationExecutorsListRequest.
&quot;sparkApplicationExecutors&quot;: [ # Details about executors used by the application.
{ # Details about executors used by the application.
&quot;activeTasks&quot;: 42,
&quot;addTime&quot;: &quot;A String&quot;,
&quot;attributes&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;completedTasks&quot;: 42,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;excludedInStages&quot;: [
&quot;A String&quot;,
],
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;failedTasks&quot;: 42,
&quot;hostPort&quot;: &quot;A String&quot;,
&quot;isActive&quot;: True or False,
&quot;isExcluded&quot;: True or False,
&quot;maxMemory&quot;: &quot;A String&quot;,
&quot;maxTasks&quot;: 42,
&quot;memoryMetrics&quot;: {
&quot;totalOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;totalOnHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOnHeapStorageMemory&quot;: &quot;A String&quot;,
},
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;rddBlocks&quot;: 42,
&quot;removeReason&quot;: &quot;A String&quot;,
&quot;removeTime&quot;: &quot;A String&quot;,
&quot;resourceProfileId&quot;: 42,
&quot;resources&quot;: {
&quot;a_key&quot;: {
&quot;addresses&quot;: [
&quot;A String&quot;,
],
&quot;name&quot;: &quot;A String&quot;,
},
},
&quot;totalCores&quot;: 42,
&quot;totalDurationMillis&quot;: &quot;A String&quot;,
&quot;totalGcTimeMillis&quot;: &quot;A String&quot;,
&quot;totalInputBytes&quot;: &quot;A String&quot;,
&quot;totalShuffleRead&quot;: &quot;A String&quot;,
&quot;totalShuffleWrite&quot;: &quot;A String&quot;,
&quot;totalTasks&quot;: 42,
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchExecutors_next">searchExecutors_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="searchJobs">searchJobs(name, jobStatus=None, pageSize=None, pageToken=None, parent=None, x__xgafv=None)</code>
<pre>Obtain list of spark jobs corresponding to a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
jobStatus: string, Optional. List only jobs in the specific state.
Allowed values
JOB_EXECUTION_STATUS_UNSPECIFIED -
JOB_EXECUTION_STATUS_RUNNING -
JOB_EXECUTION_STATUS_SUCCEEDED -
JOB_EXECUTION_STATUS_FAILED -
JOB_EXECUTION_STATUS_UNKNOWN -
pageSize: integer, Optional. Maximum number of jobs to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous SearchSparkApplicationJobs call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # A list of Jobs associated with a Spark Application.
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent SearchSparkApplicationJobsRequest.
&quot;sparkApplicationJobs&quot;: [ # Output only. Data corresponding to a spark job.
{ # Data corresponding to a spark job.
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;jobGroup&quot;: &quot;A String&quot;,
&quot;jobId&quot;: &quot;A String&quot;,
&quot;killTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveStages&quot;: 42,
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numCompletedStages&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedStages&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numSkippedStages&quot;: 42,
&quot;numSkippedTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;skippedStages&quot;: [
42,
],
&quot;sqlExecutionId&quot;: &quot;A String&quot;,
&quot;stageIds&quot;: [
&quot;A String&quot;,
],
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchJobs_next">searchJobs_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="searchSqlQueries">searchSqlQueries(name, details=None, pageSize=None, pageToken=None, parent=None, planDescription=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to SQL Queries for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
  details: boolean, Optional. Lists or hides details of Spark plan nodes. Set to true to list details and false to hide them.
pageSize: integer, Optional. Maximum number of queries to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous SearchSparkApplicationSqlQueries call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
  planDescription: boolean, Optional. Enables or disables the physical plan description on demand.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # List of all queries for a Spark Application.
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent SearchSparkApplicationSqlQueriesRequest.
&quot;sparkApplicationSqlQueries&quot;: [ # Output only. SQL Execution Data
{ # SQL Execution Data
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executionId&quot;: &quot;A String&quot;,
&quot;jobs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricValues&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricValuesIsNull&quot;: True or False,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;modifiedConfigs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;physicalPlanDescription&quot;: &quot;A String&quot;,
&quot;rootExecutionId&quot;: &quot;A String&quot;,
&quot;stages&quot;: [
&quot;A String&quot;,
],
&quot;submissionTime&quot;: &quot;A String&quot;,
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchSqlQueries_next">searchSqlQueries_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="searchStageAttemptTasks">searchStageAttemptTasks(name, pageSize=None, pageToken=None, parent=None, sortRuntime=None, stageAttemptId=None, stageId=None, taskStatus=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to tasks for a spark stage attempt for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
pageSize: integer, Optional. Maximum number of tasks to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous ListSparkApplicationStageAttemptTasks call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
sortRuntime: boolean, Optional. Sort the tasks by runtime.
stageAttemptId: integer, Optional. Stage Attempt ID
stageId: string, Optional. Stage ID
taskStatus: string, Optional. List only tasks in the state.
Allowed values
TASK_STATUS_UNSPECIFIED -
TASK_STATUS_RUNNING -
TASK_STATUS_SUCCESS -
TASK_STATUS_FAILED -
TASK_STATUS_KILLED -
TASK_STATUS_PENDING -
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # List of tasks for a stage of a Spark Application
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListSparkApplicationStageAttemptTasksRequest.
&quot;sparkApplicationStageAttemptTasks&quot;: [ # Output only. Data corresponding to tasks created by spark.
{ # Data corresponding to tasks created by spark.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;attempt&quot;: 42,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: &quot;A String&quot;,
&quot;hasMetrics&quot;: True or False,
&quot;host&quot;: &quot;A String&quot;,
&quot;index&quot;: 42,
&quot;launchTime&quot;: &quot;A String&quot;,
&quot;partitionId&quot;: 42,
&quot;resultFetchStart&quot;: &quot;A String&quot;,
&quot;schedulerDelayMillis&quot;: &quot;A String&quot;,
&quot;speculative&quot;: True or False,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;status&quot;: &quot;A String&quot;,
&quot;taskId&quot;: &quot;A String&quot;,
&quot;taskLocality&quot;: &quot;A String&quot;,
&quot;taskMetrics&quot;: { # Executor Task Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;inputMetrics&quot;: { # Metrics about the input data read by the task.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputMetrics&quot;: { # Metrics about the data written by the task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;shuffleReadMetrics&quot;: { # Shuffle data read by the task.
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: { # Shuffle data written by task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchStageAttemptTasks_next">searchStageAttemptTasks_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="searchStageAttempts">searchStageAttempts(name, pageSize=None, pageToken=None, parent=None, stageId=None, summaryMetricsMask=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to spark stage attempts for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
pageSize: integer, Optional. Maximum number of stage attempts (paging based on stage_attempt_id) to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous SearchSparkApplicationStageAttempts call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
stageId: string, Required. Stage ID for which attempts are to be fetched
summaryMetricsMask: string, Optional. The list of summary metrics fields to include. Empty list will default to skip all summary metrics fields. Example, if the response should include TaskQuantileMetrics, the request should have task_quantile_metrics in summary_metrics_mask field
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # A list of Stage Attempts for a Stage of a Spark Application.
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListSparkApplicationStageAttemptsRequest.
  &quot;sparkApplicationStageAttempts&quot;: [ # Output only. Data corresponding to stage attempts
{ # Data corresponding to a stage.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;executorMetricsDistributions&quot;: {
&quot;diskBytesSpilled&quot;: [
3.14,
],
&quot;failedTasks&quot;: [
3.14,
],
&quot;inputBytes&quot;: [
3.14,
],
&quot;inputRecords&quot;: [
3.14,
],
&quot;killedTasks&quot;: [
3.14,
],
&quot;memoryBytesSpilled&quot;: [
3.14,
],
&quot;outputBytes&quot;: [
3.14,
],
&quot;outputRecords&quot;: [
3.14,
],
&quot;peakMemoryMetrics&quot;: {
&quot;executorMetrics&quot;: [
{
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
],
&quot;quantiles&quot;: [
3.14,
],
},
&quot;quantiles&quot;: [
3.14,
],
&quot;shuffleRead&quot;: [
3.14,
],
&quot;shuffleReadRecords&quot;: [
3.14,
],
&quot;shuffleWrite&quot;: [
3.14,
],
&quot;shuffleWriteRecords&quot;: [
3.14,
],
&quot;succeededTasks&quot;: [
3.14,
],
&quot;taskTimeMillis&quot;: [
3.14,
],
},
&quot;executorSummary&quot;: {
&quot;a_key&quot;: { # Executor resources consumed by a stage.
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;inputBytes&quot;: &quot;A String&quot;,
&quot;inputRecords&quot;: &quot;A String&quot;,
&quot;isExcludedForStage&quot;: True or False,
&quot;killedTasks&quot;: 42,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputBytes&quot;: &quot;A String&quot;,
&quot;outputRecords&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;shuffleRead&quot;: &quot;A String&quot;,
&quot;shuffleReadRecords&quot;: &quot;A String&quot;,
&quot;shuffleWrite&quot;: &quot;A String&quot;,
&quot;shuffleWriteRecords&quot;: &quot;A String&quot;,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;succeededTasks&quot;: 42,
&quot;taskTimeMillis&quot;: &quot;A String&quot;,
},
},
&quot;failureReason&quot;: &quot;A String&quot;,
&quot;firstTaskLaunchedTime&quot;: &quot;A String&quot;,
&quot;isShufflePushEnabled&quot;: True or False,
&quot;jobIds&quot;: [
&quot;A String&quot;,
],
&quot;killedTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;locality&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveTasks&quot;: 42,
&quot;numCompleteTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;parentStageIds&quot;: [
&quot;A String&quot;,
],
&quot;peakExecutorMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;rddIds&quot;: [
&quot;A String&quot;,
],
&quot;resourceProfileId&quot;: 42,
&quot;schedulingPool&quot;: &quot;A String&quot;,
&quot;shuffleMergersCount&quot;: 42,
&quot;speculationSummary&quot;: { # Details of the speculation task when speculative execution is enabled.
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
},
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;stageMetrics&quot;: { # Stage Level Aggregated Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;stageInputMetrics&quot;: { # Metrics about the input read by the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;stageOutputMetrics&quot;: { # Metrics about the output written by the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;stageShuffleReadMetrics&quot;: { # Shuffle data read for the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;stageShufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;stageShuffleWriteMetrics&quot;: { # Shuffle data written for the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
&quot;taskQuantileMetrics&quot;: { # Summary metrics fields. These are included in response only if present in summary_metrics_mask field in request
&quot;diskBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;durationMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorRunTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;inputMetrics&quot;: {
&quot;bytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;jvmGcTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;memoryBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;outputMetrics&quot;: {
&quot;bytesWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;peakExecutionMemoryBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSerializationTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSize&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;schedulerDelayMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shuffleReadMetrics&quot;: {
&quot;fetchWaitTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesReadToDisk&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;mergedFetchFallbackCount&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;totalBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: {
&quot;writeBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
},
&quot;tasks&quot;: {
&quot;a_key&quot;: { # Data corresponding to tasks created by spark.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;attempt&quot;: 42,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: &quot;A String&quot;,
&quot;hasMetrics&quot;: True or False,
&quot;host&quot;: &quot;A String&quot;,
&quot;index&quot;: 42,
&quot;launchTime&quot;: &quot;A String&quot;,
&quot;partitionId&quot;: 42,
&quot;resultFetchStart&quot;: &quot;A String&quot;,
&quot;schedulerDelayMillis&quot;: &quot;A String&quot;,
&quot;speculative&quot;: True or False,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;status&quot;: &quot;A String&quot;,
&quot;taskId&quot;: &quot;A String&quot;,
&quot;taskLocality&quot;: &quot;A String&quot;,
&quot;taskMetrics&quot;: { # Executor Task Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;inputMetrics&quot;: { # Metrics about the input data read by the task.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputMetrics&quot;: { # Metrics about the data written by the task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;shuffleReadMetrics&quot;: { # Shuffle data read by the task.
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: { # Shuffle data written by task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
},
},
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchStageAttempts_next">searchStageAttempts_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="searchStages">searchStages(name, pageSize=None, pageToken=None, parent=None, stageStatus=None, summaryMetricsMask=None, x__xgafv=None)</code>
<pre>Obtain data corresponding to stages for a Spark Application.
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
pageSize: integer, Optional. Maximum number of stages (paging based on stage_id) to return in each response. The service may return fewer than this. The default page size is 10; the maximum page size is 100.
pageToken: string, Optional. A page token received from a previous FetchSparkApplicationStagesList call. Provide this token to retrieve the subsequent page.
parent: string, Required. Parent (Batch) resource reference.
stageStatus: string, Optional. List only stages in the given state.
Allowed values
STAGE_STATUS_UNSPECIFIED -
STAGE_STATUS_ACTIVE -
STAGE_STATUS_COMPLETE -
STAGE_STATUS_FAILED -
STAGE_STATUS_PENDING -
STAGE_STATUS_SKIPPED -
summaryMetricsMask: string, Optional. The list of summary metrics fields to include. An empty list defaults to skipping all summary metrics fields. For example, if the response should include TaskQuantileMetrics, the request should have task_quantile_metrics in the summary_metrics_mask field
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # A list of stages associated with a Spark Application.
&quot;nextPageToken&quot;: &quot;A String&quot;, # This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent SearchSparkApplicationStages request.
&quot;sparkApplicationStages&quot;: [ # Output only. Data corresponding to a stage.
{ # Data corresponding to a stage.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;executorMetricsDistributions&quot;: {
&quot;diskBytesSpilled&quot;: [
3.14,
],
&quot;failedTasks&quot;: [
3.14,
],
&quot;inputBytes&quot;: [
3.14,
],
&quot;inputRecords&quot;: [
3.14,
],
&quot;killedTasks&quot;: [
3.14,
],
&quot;memoryBytesSpilled&quot;: [
3.14,
],
&quot;outputBytes&quot;: [
3.14,
],
&quot;outputRecords&quot;: [
3.14,
],
&quot;peakMemoryMetrics&quot;: {
&quot;executorMetrics&quot;: [
{
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
],
&quot;quantiles&quot;: [
3.14,
],
},
&quot;quantiles&quot;: [
3.14,
],
&quot;shuffleRead&quot;: [
3.14,
],
&quot;shuffleReadRecords&quot;: [
3.14,
],
&quot;shuffleWrite&quot;: [
3.14,
],
&quot;shuffleWriteRecords&quot;: [
3.14,
],
&quot;succeededTasks&quot;: [
3.14,
],
&quot;taskTimeMillis&quot;: [
3.14,
],
},
&quot;executorSummary&quot;: {
&quot;a_key&quot;: { # Executor resources consumed by a stage.
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;inputBytes&quot;: &quot;A String&quot;,
&quot;inputRecords&quot;: &quot;A String&quot;,
&quot;isExcludedForStage&quot;: True or False,
&quot;killedTasks&quot;: 42,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputBytes&quot;: &quot;A String&quot;,
&quot;outputRecords&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;shuffleRead&quot;: &quot;A String&quot;,
&quot;shuffleReadRecords&quot;: &quot;A String&quot;,
&quot;shuffleWrite&quot;: &quot;A String&quot;,
&quot;shuffleWriteRecords&quot;: &quot;A String&quot;,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;succeededTasks&quot;: 42,
&quot;taskTimeMillis&quot;: &quot;A String&quot;,
},
},
&quot;failureReason&quot;: &quot;A String&quot;,
&quot;firstTaskLaunchedTime&quot;: &quot;A String&quot;,
&quot;isShufflePushEnabled&quot;: True or False,
&quot;jobIds&quot;: [
&quot;A String&quot;,
],
&quot;killedTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;locality&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveTasks&quot;: 42,
&quot;numCompleteTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;parentStageIds&quot;: [
&quot;A String&quot;,
],
&quot;peakExecutorMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;rddIds&quot;: [
&quot;A String&quot;,
],
&quot;resourceProfileId&quot;: 42,
&quot;schedulingPool&quot;: &quot;A String&quot;,
&quot;shuffleMergersCount&quot;: 42,
&quot;speculationSummary&quot;: { # Details of the speculation task when speculative execution is enabled.
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
},
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;stageMetrics&quot;: { # Stage Level Aggregated Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;stageInputMetrics&quot;: { # Metrics about the input read by the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;stageOutputMetrics&quot;: { # Metrics about the output written by the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;stageShuffleReadMetrics&quot;: { # Shuffle data read for the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;stageShufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;stageShuffleWriteMetrics&quot;: { # Shuffle data written for the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
&quot;taskQuantileMetrics&quot;: { # Summary metrics fields. These are included in response only if present in summary_metrics_mask field in request
&quot;diskBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;durationMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorRunTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;inputMetrics&quot;: {
&quot;bytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;jvmGcTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;memoryBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;outputMetrics&quot;: {
&quot;bytesWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;peakExecutionMemoryBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSerializationTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSize&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;schedulerDelayMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shuffleReadMetrics&quot;: {
&quot;fetchWaitTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesReadToDisk&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;mergedFetchFallbackCount&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;totalBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: {
&quot;writeBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
},
&quot;tasks&quot;: {
&quot;a_key&quot;: { # Data corresponding to tasks created by Spark.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;attempt&quot;: 42,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: &quot;A String&quot;,
&quot;hasMetrics&quot;: True or False,
&quot;host&quot;: &quot;A String&quot;,
&quot;index&quot;: 42,
&quot;launchTime&quot;: &quot;A String&quot;,
&quot;partitionId&quot;: 42,
&quot;resultFetchStart&quot;: &quot;A String&quot;,
&quot;schedulerDelayMillis&quot;: &quot;A String&quot;,
&quot;speculative&quot;: True or False,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;status&quot;: &quot;A String&quot;,
&quot;taskId&quot;: &quot;A String&quot;,
&quot;taskLocality&quot;: &quot;A String&quot;,
&quot;taskMetrics&quot;: { # Executor Task Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;inputMetrics&quot;: { # Metrics about the input data read by the task.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputMetrics&quot;: { # Metrics about the data written by the task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;shuffleReadMetrics&quot;: { # Shuffle data read by the task.
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: { # Shuffle data written by task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
},
},
},
],
}</pre>
</div>
<div class="method">
<code class="details" id="searchStages_next">searchStages_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="search_next">search_next()</code>
<pre>Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call &#x27;execute()&#x27; on to request the next
page. Returns None if there are no more items in the collection.
</pre>
</div>
<div class="method">
<code class="details" id="summarizeExecutors">summarizeExecutors(name, parent=None, x__xgafv=None)</code>
<pre>Obtain a consolidated executor summary for a Spark Application
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Consolidated summary of executors for a Spark Application.
&quot;activeExecutorSummary&quot;: { # Consolidated summary about executors used by the application. # Consolidated summary for active executors.
&quot;activeTasks&quot;: 42,
&quot;completedTasks&quot;: 42,
&quot;count&quot;: 42,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;isExcluded&quot;: 42,
&quot;maxMemory&quot;: &quot;A String&quot;,
&quot;memoryMetrics&quot;: {
&quot;totalOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;totalOnHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOnHeapStorageMemory&quot;: &quot;A String&quot;,
},
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;rddBlocks&quot;: 42,
&quot;totalCores&quot;: 42,
&quot;totalDurationMillis&quot;: &quot;A String&quot;,
&quot;totalGcTimeMillis&quot;: &quot;A String&quot;,
&quot;totalInputBytes&quot;: &quot;A String&quot;,
&quot;totalShuffleRead&quot;: &quot;A String&quot;,
&quot;totalShuffleWrite&quot;: &quot;A String&quot;,
&quot;totalTasks&quot;: 42,
},
&quot;applicationId&quot;: &quot;A String&quot;, # Spark Application Id
&quot;deadExecutorSummary&quot;: { # Consolidated summary about executors used by the application. # Consolidated summary for dead executors.
&quot;activeTasks&quot;: 42,
&quot;completedTasks&quot;: 42,
&quot;count&quot;: 42,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;isExcluded&quot;: 42,
&quot;maxMemory&quot;: &quot;A String&quot;,
&quot;memoryMetrics&quot;: {
&quot;totalOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;totalOnHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOnHeapStorageMemory&quot;: &quot;A String&quot;,
},
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;rddBlocks&quot;: 42,
&quot;totalCores&quot;: 42,
&quot;totalDurationMillis&quot;: &quot;A String&quot;,
&quot;totalGcTimeMillis&quot;: &quot;A String&quot;,
&quot;totalInputBytes&quot;: &quot;A String&quot;,
&quot;totalShuffleRead&quot;: &quot;A String&quot;,
&quot;totalShuffleWrite&quot;: &quot;A String&quot;,
&quot;totalTasks&quot;: 42,
},
&quot;totalExecutorSummary&quot;: { # Consolidated summary about executors used by the application. # Overall consolidated summary for all executors.
&quot;activeTasks&quot;: 42,
&quot;completedTasks&quot;: 42,
&quot;count&quot;: 42,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;isExcluded&quot;: 42,
&quot;maxMemory&quot;: &quot;A String&quot;,
&quot;memoryMetrics&quot;: {
&quot;totalOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;totalOnHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOnHeapStorageMemory&quot;: &quot;A String&quot;,
},
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;rddBlocks&quot;: 42,
&quot;totalCores&quot;: 42,
&quot;totalDurationMillis&quot;: &quot;A String&quot;,
&quot;totalGcTimeMillis&quot;: &quot;A String&quot;,
&quot;totalInputBytes&quot;: &quot;A String&quot;,
&quot;totalShuffleRead&quot;: &quot;A String&quot;,
&quot;totalShuffleWrite&quot;: &quot;A String&quot;,
&quot;totalTasks&quot;: 42,
},
}</pre>
</div>
<div class="method">
<code class="details" id="summarizeJobs">summarizeJobs(name, parent=None, x__xgafv=None)</code>
<pre>Obtain summary of Jobs for a Spark Application
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Summary of a Spark Application jobs.
&quot;jobsSummary&quot;: { # Data related to Jobs page summary # Summary of a Spark Application Jobs
&quot;activeJobs&quot;: 42, # Number of active jobs
&quot;applicationId&quot;: &quot;A String&quot;, # Spark Application Id
&quot;attempts&quot;: [ # Attempts info
{ # Specific attempt of an application.
&quot;appSparkVersion&quot;: &quot;A String&quot;,
&quot;attemptId&quot;: &quot;A String&quot;,
&quot;completed&quot;: True or False,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;endTime&quot;: &quot;A String&quot;,
&quot;lastUpdated&quot;: &quot;A String&quot;,
&quot;sparkUser&quot;: &quot;A String&quot;,
&quot;startTime&quot;: &quot;A String&quot;,
},
],
&quot;completedJobs&quot;: 42, # Number of completed jobs
&quot;failedJobs&quot;: 42, # Number of failed jobs
&quot;schedulingMode&quot;: &quot;A String&quot;, # Spark Scheduling mode
},
}</pre>
</div>
<div class="method">
<code class="details" id="summarizeStageAttemptTasks">summarizeStageAttemptTasks(name, parent=None, stageAttemptId=None, stageId=None, x__xgafv=None)</code>
<pre>Obtain summary of Tasks for a Spark Application Stage Attempt
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
stageAttemptId: integer, Required. Stage Attempt ID
stageId: string, Required. Stage ID
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Summary of tasks for a Spark Application stage attempt.
&quot;stageAttemptTasksSummary&quot;: { # Data related to tasks summary for a Spark Stage Attempt # Summary of tasks for a Spark Application Stage Attempt
&quot;applicationId&quot;: &quot;A String&quot;,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numPendingTasks&quot;: 42,
&quot;numRunningTasks&quot;: 42,
&quot;numSuccessTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
},
}</pre>
</div>
<div class="method">
<code class="details" id="summarizeStages">summarizeStages(name, parent=None, x__xgafv=None)</code>
<pre>Obtain summary of Stages for a Spark Application
Args:
name: string, Required. The fully qualified name of the batch to retrieve in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
parent: string, Required. Parent (Batch) resource reference.
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Summary of a Spark Application stages.
&quot;stagesSummary&quot;: { # Data related to Stages page summary # Summary of a Spark Application Stages
&quot;applicationId&quot;: &quot;A String&quot;,
&quot;numActiveStages&quot;: 42,
&quot;numCompletedStages&quot;: 42,
&quot;numFailedStages&quot;: 42,
&quot;numPendingStages&quot;: 42,
&quot;numSkippedStages&quot;: 42,
},
}</pre>
</div>
<div class="method">
<code class="details" id="write">write(name, body=None, x__xgafv=None)</code>
<pre>Write wrapper objects from the dataplane to Spanner
Args:
name: string, Required. The fully qualified name of the spark application to write data about in the format &quot;projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID/sparkApplications/APPLICATION_ID&quot; (required)
body: object, The request body.
The object takes the form of:
{ # Write Spark Application data to internal storage systems
&quot;parent&quot;: &quot;A String&quot;, # Required. Parent (Batch) resource reference.
&quot;sparkWrapperObjects&quot;: [
{ # Outer message that contains the data obtained from the Spark listener, packaged with information that is required to process it.
&quot;appSummary&quot;: {
&quot;numCompletedJobs&quot;: 42,
&quot;numCompletedStages&quot;: 42,
},
&quot;applicationEnvironmentInfo&quot;: { # Details about the Environment that the application is running in.
&quot;classpathEntries&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;hadoopProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricsProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;resourceProfiles&quot;: [
{ # Resource profile that contains information about all the resources required by executors and tasks.
&quot;executorResources&quot;: {
&quot;a_key&quot;: { # Resources used per executor used by the application.
&quot;amount&quot;: &quot;A String&quot;,
&quot;discoveryScript&quot;: &quot;A String&quot;,
&quot;resourceName&quot;: &quot;A String&quot;,
&quot;vendor&quot;: &quot;A String&quot;,
},
},
&quot;resourceProfileId&quot;: 42,
&quot;taskResources&quot;: {
&quot;a_key&quot;: { # Resources used per task created by the application.
&quot;amount&quot;: 3.14,
&quot;resourceName&quot;: &quot;A String&quot;,
},
},
},
],
&quot;runtime&quot;: {
&quot;javaHome&quot;: &quot;A String&quot;,
&quot;javaVersion&quot;: &quot;A String&quot;,
&quot;scalaVersion&quot;: &quot;A String&quot;,
},
&quot;sparkProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;systemProperties&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;applicationId&quot;: &quot;A String&quot;, # Application Id created by Spark.
&quot;applicationInfo&quot;: { # High level information corresponding to an application.
&quot;applicationContextIngestionStatus&quot;: &quot;A String&quot;,
&quot;applicationId&quot;: &quot;A String&quot;,
&quot;attempts&quot;: [
{ # Specific attempt of an application.
&quot;appSparkVersion&quot;: &quot;A String&quot;,
&quot;attemptId&quot;: &quot;A String&quot;,
&quot;completed&quot;: True or False,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;endTime&quot;: &quot;A String&quot;,
&quot;lastUpdated&quot;: &quot;A String&quot;,
&quot;sparkUser&quot;: &quot;A String&quot;,
&quot;startTime&quot;: &quot;A String&quot;,
},
],
&quot;coresGranted&quot;: 42,
&quot;coresPerExecutor&quot;: 42,
&quot;maxCores&quot;: 42,
&quot;memoryPerExecutorMb&quot;: 42,
&quot;name&quot;: &quot;A String&quot;,
&quot;quantileDataStatus&quot;: &quot;A String&quot;,
},
&quot;eventTimestamp&quot;: &quot;A String&quot;, # VM Timestamp associated with the data object.
&quot;executorStageSummary&quot;: { # Executor resources consumed by a stage.
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;inputBytes&quot;: &quot;A String&quot;,
&quot;inputRecords&quot;: &quot;A String&quot;,
&quot;isExcludedForStage&quot;: True or False,
&quot;killedTasks&quot;: 42,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputBytes&quot;: &quot;A String&quot;,
&quot;outputRecords&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;shuffleRead&quot;: &quot;A String&quot;,
&quot;shuffleReadRecords&quot;: &quot;A String&quot;,
&quot;shuffleWrite&quot;: &quot;A String&quot;,
&quot;shuffleWriteRecords&quot;: &quot;A String&quot;,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;succeededTasks&quot;: 42,
&quot;taskTimeMillis&quot;: &quot;A String&quot;,
},
&quot;executorSummary&quot;: { # Details about executors used by the application.
&quot;activeTasks&quot;: 42,
&quot;addTime&quot;: &quot;A String&quot;,
&quot;attributes&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;completedTasks&quot;: 42,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;excludedInStages&quot;: [
&quot;A String&quot;,
],
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;failedTasks&quot;: 42,
&quot;hostPort&quot;: &quot;A String&quot;,
&quot;isActive&quot;: True or False,
&quot;isExcluded&quot;: True or False,
&quot;maxMemory&quot;: &quot;A String&quot;,
&quot;maxTasks&quot;: 42,
&quot;memoryMetrics&quot;: {
&quot;totalOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;totalOnHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOffHeapStorageMemory&quot;: &quot;A String&quot;,
&quot;usedOnHeapStorageMemory&quot;: &quot;A String&quot;,
},
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;rddBlocks&quot;: 42,
&quot;removeReason&quot;: &quot;A String&quot;,
&quot;removeTime&quot;: &quot;A String&quot;,
&quot;resourceProfileId&quot;: 42,
&quot;resources&quot;: {
&quot;a_key&quot;: {
&quot;addresses&quot;: [
&quot;A String&quot;,
],
&quot;name&quot;: &quot;A String&quot;,
},
},
&quot;totalCores&quot;: 42,
&quot;totalDurationMillis&quot;: &quot;A String&quot;,
&quot;totalGcTimeMillis&quot;: &quot;A String&quot;,
&quot;totalInputBytes&quot;: &quot;A String&quot;,
&quot;totalShuffleRead&quot;: &quot;A String&quot;,
&quot;totalShuffleWrite&quot;: &quot;A String&quot;,
&quot;totalTasks&quot;: 42,
},
&quot;jobData&quot;: { # Data corresponding to a spark job.
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;jobGroup&quot;: &quot;A String&quot;,
&quot;jobId&quot;: &quot;A String&quot;,
&quot;killTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveStages&quot;: 42,
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numCompletedStages&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedStages&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numSkippedStages&quot;: 42,
&quot;numSkippedTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;skippedStages&quot;: [
42,
],
&quot;sqlExecutionId&quot;: &quot;A String&quot;,
&quot;stageIds&quot;: [
&quot;A String&quot;,
],
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
},
&quot;nativeBuildInfoUiData&quot;: { # Native Build Info
&quot;buildClass&quot;: &quot;A String&quot;, # Optional. Build class of Native.
&quot;buildInfo&quot;: [ # Optional. Build related details.
{ # Native Build Info
&quot;buildKey&quot;: &quot;A String&quot;, # Optional. Build key.
&quot;buildValue&quot;: &quot;A String&quot;, # Optional. Build value.
},
],
},
&quot;nativeSqlExecutionUiData&quot;: { # Native SQL Execution Data # Native SQL Execution Info
&quot;description&quot;: &quot;A String&quot;, # Optional. Description of the execution.
&quot;executionId&quot;: &quot;A String&quot;, # Required. Execution ID of the Native SQL Execution.
&quot;fallbackDescription&quot;: &quot;A String&quot;, # Optional. Description of the fallback.
&quot;fallbackNodeToReason&quot;: [ # Optional. Fallback node to reason.
{ # Native SQL Execution Data
&quot;fallbackNode&quot;: &quot;A String&quot;, # Optional. Fallback node information.
&quot;fallbackReason&quot;: &quot;A String&quot;, # Optional. Fallback to Spark reason.
},
],
&quot;numFallbackNodes&quot;: 42, # Optional. Number of nodes fallen back to Spark.
&quot;numNativeNodes&quot;: 42, # Optional. Number of nodes in Native.
},
&quot;poolData&quot;: { # Pool Data
&quot;name&quot;: &quot;A String&quot;,
&quot;stageIds&quot;: [
&quot;A String&quot;,
],
},
&quot;processSummary&quot;: { # Process Summary
&quot;addTime&quot;: &quot;A String&quot;,
&quot;hostPort&quot;: &quot;A String&quot;,
&quot;isActive&quot;: True or False,
&quot;processId&quot;: &quot;A String&quot;,
&quot;processLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;removeTime&quot;: &quot;A String&quot;,
&quot;totalCores&quot;: 42,
},
&quot;rddOperationGraph&quot;: { # Graph representing RDD dependencies. Consists of edges and a root cluster.
&quot;edges&quot;: [
{ # A directed edge representing dependency between two RDDs.
&quot;fromId&quot;: 42,
&quot;toId&quot;: 42,
},
],
&quot;incomingEdges&quot;: [
{ # A directed edge representing dependency between two RDDs.
&quot;fromId&quot;: 42,
&quot;toId&quot;: 42,
},
],
&quot;outgoingEdges&quot;: [
{ # A directed edge representing dependency between two RDDs.
&quot;fromId&quot;: 42,
&quot;toId&quot;: 42,
},
],
&quot;rootCluster&quot;: { # A grouping of nodes representing higher level constructs (stage, job etc.).
&quot;childClusters&quot;: [
# Object with schema name: RddOperationCluster
],
&quot;childNodes&quot;: [
{ # A node in the RDD operation graph. Corresponds to a single RDD.
&quot;barrier&quot;: True or False,
&quot;cached&quot;: True or False,
&quot;callsite&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;nodeId&quot;: 42,
&quot;outputDeterministicLevel&quot;: &quot;A String&quot;,
},
],
&quot;name&quot;: &quot;A String&quot;,
&quot;rddClusterId&quot;: &quot;A String&quot;,
},
&quot;stageId&quot;: &quot;A String&quot;,
},
&quot;rddStorageInfo&quot;: { # Overall data about RDD storage.
&quot;dataDistribution&quot;: [
{ # Details about RDD usage.
&quot;address&quot;: &quot;A String&quot;,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;memoryRemaining&quot;: &quot;A String&quot;,
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;offHeapMemoryRemaining&quot;: &quot;A String&quot;,
&quot;offHeapMemoryUsed&quot;: &quot;A String&quot;,
&quot;onHeapMemoryRemaining&quot;: &quot;A String&quot;,
&quot;onHeapMemoryUsed&quot;: &quot;A String&quot;,
},
],
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;numCachedPartitions&quot;: 42,
&quot;numPartitions&quot;: 42,
&quot;partitions&quot;: [
{ # Information about RDD partitions.
&quot;blockName&quot;: &quot;A String&quot;,
&quot;diskUsed&quot;: &quot;A String&quot;,
&quot;executors&quot;: [
&quot;A String&quot;,
],
&quot;memoryUsed&quot;: &quot;A String&quot;,
&quot;storageLevel&quot;: &quot;A String&quot;,
},
],
&quot;rddStorageId&quot;: 42,
&quot;storageLevel&quot;: &quot;A String&quot;,
},
&quot;resourceProfileInfo&quot;: { # Resource profile that contains information about all the resources required by executors and tasks.
&quot;executorResources&quot;: {
&quot;a_key&quot;: { # Resources used per executor used by the application.
&quot;amount&quot;: &quot;A String&quot;,
&quot;discoveryScript&quot;: &quot;A String&quot;,
&quot;resourceName&quot;: &quot;A String&quot;,
&quot;vendor&quot;: &quot;A String&quot;,
},
},
&quot;resourceProfileId&quot;: 42,
&quot;taskResources&quot;: {
&quot;a_key&quot;: { # Resources used per task created by the application.
&quot;amount&quot;: 3.14,
&quot;resourceName&quot;: &quot;A String&quot;,
},
},
},
&quot;sparkPlanGraph&quot;: { # A graph used for storing information of an executionPlan of DataFrame.
&quot;edges&quot;: [
{ # Represents a directed edge in the spark plan tree from child to parent.
&quot;fromId&quot;: &quot;A String&quot;,
&quot;toId&quot;: &quot;A String&quot;,
},
],
&quot;executionId&quot;: &quot;A String&quot;,
&quot;nodes&quot;: [
{ # Wrapper user to represent either a node or a cluster.
&quot;cluster&quot;: { # Represents a tree of spark plan.
&quot;desc&quot;: &quot;A String&quot;,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;name&quot;: &quot;A String&quot;,
&quot;nodes&quot;: [
# Object with schema name: SparkPlanGraphNodeWrapper
],
&quot;sparkPlanGraphClusterId&quot;: &quot;A String&quot;,
},
&quot;node&quot;: { # Represents a node in the spark plan tree.
&quot;desc&quot;: &quot;A String&quot;,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;name&quot;: &quot;A String&quot;,
&quot;sparkPlanGraphNodeId&quot;: &quot;A String&quot;,
},
},
],
},
&quot;speculationStageSummary&quot;: { # Details of the speculation task when speculative execution is enabled.
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
},
&quot;sqlExecutionUiData&quot;: { # SQL Execution Data
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executionId&quot;: &quot;A String&quot;,
&quot;jobs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricValues&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;metricValuesIsNull&quot;: True or False,
&quot;metrics&quot;: [
{ # Metrics related to SQL execution.
&quot;accumulatorId&quot;: &quot;A String&quot;,
&quot;metricType&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
},
],
&quot;modifiedConfigs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;physicalPlanDescription&quot;: &quot;A String&quot;,
&quot;rootExecutionId&quot;: &quot;A String&quot;,
&quot;stages&quot;: [
&quot;A String&quot;,
],
&quot;submissionTime&quot;: &quot;A String&quot;,
},
&quot;stageData&quot;: { # Data corresponding to a stage.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;completionTime&quot;: &quot;A String&quot;,
&quot;description&quot;: &quot;A String&quot;,
&quot;details&quot;: &quot;A String&quot;,
&quot;executorMetricsDistributions&quot;: {
&quot;diskBytesSpilled&quot;: [
3.14,
],
&quot;failedTasks&quot;: [
3.14,
],
&quot;inputBytes&quot;: [
3.14,
],
&quot;inputRecords&quot;: [
3.14,
],
&quot;killedTasks&quot;: [
3.14,
],
&quot;memoryBytesSpilled&quot;: [
3.14,
],
&quot;outputBytes&quot;: [
3.14,
],
&quot;outputRecords&quot;: [
3.14,
],
&quot;peakMemoryMetrics&quot;: {
&quot;executorMetrics&quot;: [
{
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
],
&quot;quantiles&quot;: [
3.14,
],
},
&quot;quantiles&quot;: [
3.14,
],
&quot;shuffleRead&quot;: [
3.14,
],
&quot;shuffleReadRecords&quot;: [
3.14,
],
&quot;shuffleWrite&quot;: [
3.14,
],
&quot;shuffleWriteRecords&quot;: [
3.14,
],
&quot;succeededTasks&quot;: [
3.14,
],
&quot;taskTimeMillis&quot;: [
3.14,
],
},
&quot;executorSummary&quot;: {
&quot;a_key&quot;: { # Executor resources consumed by a stage.
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;failedTasks&quot;: 42,
&quot;inputBytes&quot;: &quot;A String&quot;,
&quot;inputRecords&quot;: &quot;A String&quot;,
&quot;isExcludedForStage&quot;: True or False,
&quot;killedTasks&quot;: 42,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputBytes&quot;: &quot;A String&quot;,
&quot;outputRecords&quot;: &quot;A String&quot;,
&quot;peakMemoryMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;shuffleRead&quot;: &quot;A String&quot;,
&quot;shuffleReadRecords&quot;: &quot;A String&quot;,
&quot;shuffleWrite&quot;: &quot;A String&quot;,
&quot;shuffleWriteRecords&quot;: &quot;A String&quot;,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;succeededTasks&quot;: 42,
&quot;taskTimeMillis&quot;: &quot;A String&quot;,
},
},
&quot;failureReason&quot;: &quot;A String&quot;,
&quot;firstTaskLaunchedTime&quot;: &quot;A String&quot;,
&quot;isShufflePushEnabled&quot;: True or False,
&quot;jobIds&quot;: [
&quot;A String&quot;,
],
&quot;killedTasksSummary&quot;: {
&quot;a_key&quot;: 42,
},
&quot;locality&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;numActiveTasks&quot;: 42,
&quot;numCompleteTasks&quot;: 42,
&quot;numCompletedIndices&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;parentStageIds&quot;: [
&quot;A String&quot;,
],
&quot;peakExecutorMetrics&quot;: {
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
},
&quot;rddIds&quot;: [
&quot;A String&quot;,
],
&quot;resourceProfileId&quot;: 42,
&quot;schedulingPool&quot;: &quot;A String&quot;,
&quot;shuffleMergersCount&quot;: 42,
&quot;speculationSummary&quot;: { # Details of the speculation task when speculative execution is enabled.
&quot;numActiveTasks&quot;: 42,
&quot;numCompletedTasks&quot;: 42,
&quot;numFailedTasks&quot;: 42,
&quot;numKilledTasks&quot;: 42,
&quot;numTasks&quot;: 42,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
},
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;stageMetrics&quot;: { # Stage Level Aggregated Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;stageInputMetrics&quot;: { # Metrics about the input read by the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;stageOutputMetrics&quot;: { # Metrics about the output written by the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;stageShuffleReadMetrics&quot;: { # Shuffle data read for the stage.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;stageShufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;stageShuffleWriteMetrics&quot;: { # Shuffle data written for the stage.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
&quot;status&quot;: &quot;A String&quot;,
&quot;submissionTime&quot;: &quot;A String&quot;,
&quot;taskQuantileMetrics&quot;: { # Summary metrics fields. These are included in response only if present in summary_metrics_mask field in request
&quot;diskBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;durationMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeCpuTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorDeserializeTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;executorRunTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;inputMetrics&quot;: {
&quot;bytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;jvmGcTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;memoryBytesSpilled&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;outputMetrics&quot;: {
&quot;bytesWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;recordsWritten&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;peakExecutionMemoryBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSerializationTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;resultSize&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;schedulerDelayMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shuffleReadMetrics&quot;: {
&quot;fetchWaitTimeMillis&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;readRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteBytesReadToDisk&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;localMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;mergedFetchFallbackCount&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedBytesRead&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedChunksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;remoteMergedReqsDuration&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;totalBlocksFetched&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: {
&quot;writeBytes&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeRecords&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
&quot;writeTimeNanos&quot;: { # Quantile metrics data related to Tasks. Units can be seconds, bytes, milliseconds, etc depending on the message type.
&quot;count&quot;: &quot;A String&quot;,
&quot;maximum&quot;: &quot;A String&quot;,
&quot;minimum&quot;: &quot;A String&quot;,
&quot;percentile25&quot;: &quot;A String&quot;,
&quot;percentile50&quot;: &quot;A String&quot;,
&quot;percentile75&quot;: &quot;A String&quot;,
&quot;sum&quot;: &quot;A String&quot;,
},
},
},
&quot;tasks&quot;: {
&quot;a_key&quot;: { # Data corresponding to tasks created by spark.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;attempt&quot;: 42,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: &quot;A String&quot;,
&quot;hasMetrics&quot;: True or False,
&quot;host&quot;: &quot;A String&quot;,
&quot;index&quot;: 42,
&quot;launchTime&quot;: &quot;A String&quot;,
&quot;partitionId&quot;: 42,
&quot;resultFetchStart&quot;: &quot;A String&quot;,
&quot;schedulerDelayMillis&quot;: &quot;A String&quot;,
&quot;speculative&quot;: True or False,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;status&quot;: &quot;A String&quot;,
&quot;taskId&quot;: &quot;A String&quot;,
&quot;taskLocality&quot;: &quot;A String&quot;,
&quot;taskMetrics&quot;: { # Executor Task Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;inputMetrics&quot;: { # Metrics about the input data read by the task.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputMetrics&quot;: { # Metrics about the data written by the task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;shuffleReadMetrics&quot;: { # Shuffle data read by the task.
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: { # Shuffle data written by task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
},
},
},
&quot;streamBlockData&quot;: { # Stream Block Data.
&quot;deserialized&quot;: True or False,
&quot;diskSize&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;hostPort&quot;: &quot;A String&quot;,
&quot;memSize&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;storageLevel&quot;: &quot;A String&quot;,
&quot;useDisk&quot;: True or False,
&quot;useMemory&quot;: True or False,
},
&quot;streamingQueryData&quot;: { # Streaming
&quot;endTimestamp&quot;: &quot;A String&quot;,
&quot;exception&quot;: &quot;A String&quot;,
&quot;isActive&quot;: True or False,
&quot;name&quot;: &quot;A String&quot;,
&quot;runId&quot;: &quot;A String&quot;,
&quot;startTimestamp&quot;: &quot;A String&quot;,
&quot;streamingQueryId&quot;: &quot;A String&quot;,
},
&quot;streamingQueryProgress&quot;: {
&quot;batchDuration&quot;: &quot;A String&quot;,
&quot;batchId&quot;: &quot;A String&quot;,
&quot;durationMillis&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;eventTime&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;name&quot;: &quot;A String&quot;,
&quot;observedMetrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;runId&quot;: &quot;A String&quot;,
&quot;sink&quot;: {
&quot;description&quot;: &quot;A String&quot;,
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;numOutputRows&quot;: &quot;A String&quot;,
},
&quot;sources&quot;: [
{
&quot;description&quot;: &quot;A String&quot;,
&quot;endOffset&quot;: &quot;A String&quot;,
&quot;inputRowsPerSecond&quot;: 3.14,
&quot;latestOffset&quot;: &quot;A String&quot;,
&quot;metrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;numInputRows&quot;: &quot;A String&quot;,
&quot;processedRowsPerSecond&quot;: 3.14,
&quot;startOffset&quot;: &quot;A String&quot;,
},
],
&quot;stateOperators&quot;: [
{
&quot;allRemovalsTimeMs&quot;: &quot;A String&quot;,
&quot;allUpdatesTimeMs&quot;: &quot;A String&quot;,
&quot;commitTimeMs&quot;: &quot;A String&quot;,
&quot;customMetrics&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;memoryUsedBytes&quot;: &quot;A String&quot;,
&quot;numRowsDroppedByWatermark&quot;: &quot;A String&quot;,
&quot;numRowsRemoved&quot;: &quot;A String&quot;,
&quot;numRowsTotal&quot;: &quot;A String&quot;,
&quot;numRowsUpdated&quot;: &quot;A String&quot;,
&quot;numShufflePartitions&quot;: &quot;A String&quot;,
&quot;numStateStoreInstances&quot;: &quot;A String&quot;,
&quot;operatorName&quot;: &quot;A String&quot;,
},
],
&quot;streamingQueryProgressId&quot;: &quot;A String&quot;,
&quot;timestamp&quot;: &quot;A String&quot;,
},
&quot;taskData&quot;: { # Data corresponding to tasks created by spark.
&quot;accumulatorUpdates&quot;: [
{
&quot;accumullableInfoId&quot;: &quot;A String&quot;,
&quot;name&quot;: &quot;A String&quot;,
&quot;update&quot;: &quot;A String&quot;,
&quot;value&quot;: &quot;A String&quot;,
},
],
&quot;attempt&quot;: 42,
&quot;durationMillis&quot;: &quot;A String&quot;,
&quot;errorMessage&quot;: &quot;A String&quot;,
&quot;executorId&quot;: &quot;A String&quot;,
&quot;executorLogs&quot;: {
&quot;a_key&quot;: &quot;A String&quot;,
},
&quot;gettingResultTimeMillis&quot;: &quot;A String&quot;,
&quot;hasMetrics&quot;: True or False,
&quot;host&quot;: &quot;A String&quot;,
&quot;index&quot;: 42,
&quot;launchTime&quot;: &quot;A String&quot;,
&quot;partitionId&quot;: 42,
&quot;resultFetchStart&quot;: &quot;A String&quot;,
&quot;schedulerDelayMillis&quot;: &quot;A String&quot;,
&quot;speculative&quot;: True or False,
&quot;stageAttemptId&quot;: 42,
&quot;stageId&quot;: &quot;A String&quot;,
&quot;status&quot;: &quot;A String&quot;,
&quot;taskId&quot;: &quot;A String&quot;,
&quot;taskLocality&quot;: &quot;A String&quot;,
&quot;taskMetrics&quot;: { # Executor Task Metrics
&quot;diskBytesSpilled&quot;: &quot;A String&quot;,
&quot;executorCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeCpuTimeNanos&quot;: &quot;A String&quot;,
&quot;executorDeserializeTimeMillis&quot;: &quot;A String&quot;,
&quot;executorRunTimeMillis&quot;: &quot;A String&quot;,
&quot;inputMetrics&quot;: { # Metrics about the input data read by the task.
&quot;bytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
},
&quot;jvmGcTimeMillis&quot;: &quot;A String&quot;,
&quot;memoryBytesSpilled&quot;: &quot;A String&quot;,
&quot;outputMetrics&quot;: { # Metrics about the data written by the task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
},
&quot;peakExecutionMemoryBytes&quot;: &quot;A String&quot;,
&quot;resultSerializationTimeMillis&quot;: &quot;A String&quot;,
&quot;resultSize&quot;: &quot;A String&quot;,
&quot;shuffleReadMetrics&quot;: { # Shuffle data read by the task.
&quot;fetchWaitTimeMillis&quot;: &quot;A String&quot;,
&quot;localBlocksFetched&quot;: &quot;A String&quot;,
&quot;localBytesRead&quot;: &quot;A String&quot;,
&quot;recordsRead&quot;: &quot;A String&quot;,
&quot;remoteBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteBytesRead&quot;: &quot;A String&quot;,
&quot;remoteBytesReadToDisk&quot;: &quot;A String&quot;,
&quot;remoteReqsDuration&quot;: &quot;A String&quot;,
&quot;shufflePushReadMetrics&quot;: {
&quot;corruptMergedBlockChunks&quot;: &quot;A String&quot;,
&quot;localMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;localMergedBytesRead&quot;: &quot;A String&quot;,
&quot;localMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;mergedFetchFallbackCount&quot;: &quot;A String&quot;,
&quot;remoteMergedBlocksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedBytesRead&quot;: &quot;A String&quot;,
&quot;remoteMergedChunksFetched&quot;: &quot;A String&quot;,
&quot;remoteMergedReqsDuration&quot;: &quot;A String&quot;,
},
},
&quot;shuffleWriteMetrics&quot;: { # Shuffle data written by task.
&quot;bytesWritten&quot;: &quot;A String&quot;,
&quot;recordsWritten&quot;: &quot;A String&quot;,
&quot;writeTimeNanos&quot;: &quot;A String&quot;,
},
},
},
},
],
}
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Response returned as an acknowledgement of receipt of data.
}</pre>
</div>
</body></html>