Class: Google::Apis::DataprocV1::Batch

Inherits:
Object
Includes:
Core::Hashable, Core::JsonObjectSupport
Defined in:
lib/google/apis/dataproc_v1/classes.rb,
lib/google/apis/dataproc_v1/representations.rb,
lib/google/apis/dataproc_v1/representations.rb

Overview

A representation of a batch workload in the service.

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(**args) ⇒ Batch

Returns a new instance of Batch.



509
510
511
# File 'lib/google/apis/dataproc_v1/classes.rb', line 509

# Builds a new Batch.
#
# All supplied keyword properties are applied in one pass through
# #update!, so construction and later mutation share one code path.
#
# @param args [Hash] property values keyed by snake_case attribute name
def initialize(**args)
  update!(**args)
end

Instance Attribute Details

#create_timeString

Output only. The time when the batch was created. Corresponds to the JSON property createTime

Returns:

  • (String)


417
418
419
# File 'lib/google/apis/dataproc_v1/classes.rb', line 417

# Output only. The time when the batch was created.
# Corresponds to the JSON property +createTime+.
# @return [String]
def create_time
  instance_variable_get(:@create_time)
end

#creatorString

Output only. The email address of the user who created the batch. Corresponds to the JSON property creator

Returns:

  • (String)


422
423
424
# File 'lib/google/apis/dataproc_v1/classes.rb', line 422

# Output only. The email address of the user who created the batch.
# Corresponds to the JSON property +creator+.
# @return [String]
def creator
  instance_variable_get(:@creator)
end

#environment_configGoogle::Apis::DataprocV1::EnvironmentConfig

Environment configuration for a workload. Corresponds to the JSON property environmentConfig



427
428
429
# File 'lib/google/apis/dataproc_v1/classes.rb', line 427

# Environment configuration for a workload.
# Corresponds to the JSON property +environmentConfig+.
# @return [Google::Apis::DataprocV1::EnvironmentConfig]
def environment_config
  instance_variable_get(:@environment_config)
end

#labelsHash<String,String>

Optional. The labels to associate with this batch. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a batch. Corresponds to the JSON property labels

Returns:

  • (Hash<String,String>)


436
437
438
# File 'lib/google/apis/dataproc_v1/classes.rb', line 436

# Optional. The labels to associate with this batch. Keys and values
# must conform to RFC 1035; at most 32 labels per batch.
# Corresponds to the JSON property +labels+.
# @return [Hash<String,String>]
def labels
  instance_variable_get(:@labels)
end

#nameString

Output only. The resource name of the batch. Corresponds to the JSON property name

Returns:

  • (String)


441
442
443
# File 'lib/google/apis/dataproc_v1/classes.rb', line 441

# Output only. The resource name of the batch.
# Corresponds to the JSON property +name+.
# @return [String]
def name
  instance_variable_get(:@name)
end

#operationString

Output only. The resource name of the operation associated with this batch. Corresponds to the JSON property operation

Returns:

  • (String)


446
447
448
# File 'lib/google/apis/dataproc_v1/classes.rb', line 446

# Output only. The resource name of the operation associated with
# this batch. Corresponds to the JSON property +operation+.
# @return [String]
def operation
  instance_variable_get(:@operation)
end

#pyspark_batchGoogle::Apis::DataprocV1::PySparkBatch

A configuration for running an Apache PySpark (https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html) batch workload. Corresponds to the JSON property pysparkBatch



452
453
454
# File 'lib/google/apis/dataproc_v1/classes.rb', line 452

# A configuration for running an Apache PySpark batch workload.
# Corresponds to the JSON property +pysparkBatch+.
# @return [Google::Apis::DataprocV1::PySparkBatch]
def pyspark_batch
  instance_variable_get(:@pyspark_batch)
end

#runtime_configGoogle::Apis::DataprocV1::RuntimeConfig

Runtime configuration for a workload. Corresponds to the JSON property runtimeConfig



457
458
459
# File 'lib/google/apis/dataproc_v1/classes.rb', line 457

# Runtime configuration for a workload.
# Corresponds to the JSON property +runtimeConfig+.
# @return [Google::Apis::DataprocV1::RuntimeConfig]
def runtime_config
  instance_variable_get(:@runtime_config)
end

#runtime_infoGoogle::Apis::DataprocV1::RuntimeInfo

Runtime information about workload execution. Corresponds to the JSON property runtimeInfo



462
463
464
# File 'lib/google/apis/dataproc_v1/classes.rb', line 462

# Runtime information about workload execution.
# Corresponds to the JSON property +runtimeInfo+.
# @return [Google::Apis::DataprocV1::RuntimeInfo]
def runtime_info
  instance_variable_get(:@runtime_info)
end

#spark_batchGoogle::Apis::DataprocV1::SparkBatch

A configuration for running an Apache Spark (https://spark.apache.org/) batch workload. Corresponds to the JSON property sparkBatch



468
469
470
# File 'lib/google/apis/dataproc_v1/classes.rb', line 468

# A configuration for running an Apache Spark batch workload.
# Corresponds to the JSON property +sparkBatch+.
# @return [Google::Apis::DataprocV1::SparkBatch]
def spark_batch
  instance_variable_get(:@spark_batch)
end

#spark_r_batchGoogle::Apis::DataprocV1::SparkRBatch

A configuration for running an Apache SparkR (https://spark.apache.org/docs/latest/sparkr.html) batch workload. Corresponds to the JSON property sparkRBatch



474
475
476
# File 'lib/google/apis/dataproc_v1/classes.rb', line 474

# A configuration for running an Apache SparkR batch workload.
# Corresponds to the JSON property +sparkRBatch+.
# @return [Google::Apis::DataprocV1::SparkRBatch]
def spark_r_batch
  instance_variable_get(:@spark_r_batch)
end

#spark_sql_batchGoogle::Apis::DataprocV1::SparkSqlBatch

A configuration for running Apache Spark SQL (https://spark.apache.org/sql/) queries as a batch workload. Corresponds to the JSON property sparkSqlBatch



480
481
482
# File 'lib/google/apis/dataproc_v1/classes.rb', line 480

# A configuration for running Apache Spark SQL queries as a batch
# workload. Corresponds to the JSON property +sparkSqlBatch+.
# @return [Google::Apis::DataprocV1::SparkSqlBatch]
def spark_sql_batch
  instance_variable_get(:@spark_sql_batch)
end

#stateString

Output only. The state of the batch. Corresponds to the JSON property state

Returns:

  • (String)


485
486
487
# File 'lib/google/apis/dataproc_v1/classes.rb', line 485

# Output only. The state of the batch.
# Corresponds to the JSON property +state+.
# @return [String]
def state
  instance_variable_get(:@state)
end

#state_historyArray<Google::Apis::DataprocV1::StateHistory>

Output only. Historical state information for the batch. Corresponds to the JSON property stateHistory



490
491
492
# File 'lib/google/apis/dataproc_v1/classes.rb', line 490

# Output only. Historical state information for the batch.
# Corresponds to the JSON property +stateHistory+.
# @return [Array<Google::Apis::DataprocV1::StateHistory>]
def state_history
  instance_variable_get(:@state_history)
end

#state_messageString

Output only. Batch state details, such as a failure description if the state is FAILED. Corresponds to the JSON property stateMessage

Returns:

  • (String)


496
497
498
# File 'lib/google/apis/dataproc_v1/classes.rb', line 496

# Output only. Batch state details, such as a failure description if
# the state is FAILED. Corresponds to the JSON property +stateMessage+.
# @return [String]
def state_message
  instance_variable_get(:@state_message)
end

#state_timeString

Output only. The time when the batch entered a current state. Corresponds to the JSON property stateTime

Returns:

  • (String)


501
502
503
# File 'lib/google/apis/dataproc_v1/classes.rb', line 501

# Output only. The time when the batch entered a current state.
# Corresponds to the JSON property +stateTime+.
# @return [String]
def state_time
  instance_variable_get(:@state_time)
end

#uuidString

Output only. A batch UUID (Unique Universal Identifier). The service generates this value when it creates the batch. Corresponds to the JSON property uuid

Returns:

  • (String)


507
508
509
# File 'lib/google/apis/dataproc_v1/classes.rb', line 507

# Output only. A batch UUID (Unique Universal Identifier); generated
# by the service at creation. Corresponds to the JSON property +uuid+.
# @return [String]
def uuid
  instance_variable_get(:@uuid)
end

Instance Method Details

#update!(**args) ⇒ Object

Update properties of this object



514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
# File 'lib/google/apis/dataproc_v1/classes.rb', line 514

# Update properties of this object.
#
# Each recognized property is copied into its matching instance
# variable only when the caller explicitly supplied that key, so
# absent keys leave existing values untouched (and an explicit nil
# still overwrites).
#
# @param args [Hash] property values keyed by snake_case attribute name
def update!(**args)
  %i[create_time creator environment_config labels name operation
     pyspark_batch runtime_config runtime_info spark_batch
     spark_r_batch spark_sql_batch state state_history
     state_message state_time uuid].each do |prop|
    instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
  end
end