mirror of
https://github.com/genuinetools/reg.git
synced 2024-09-19 16:51:01 -04:00
3063 lines
110 KiB
Go
3063 lines
110 KiB
Go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||
|
// source: google/cloud/dataproc/v1/jobs.proto
|
||
|
|
||
|
package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
|
||
|
|
||
|
import proto "github.com/golang/protobuf/proto"
|
||
|
import fmt "fmt"
|
||
|
import math "math"
|
||
|
import empty "github.com/golang/protobuf/ptypes/empty"
|
||
|
import timestamp "github.com/golang/protobuf/ptypes/timestamp"
|
||
|
import _ "google.golang.org/genproto/googleapis/api/annotations"
|
||
|
import field_mask "google.golang.org/genproto/protobuf/field_mask"
|
||
|
|
||
|
import (
|
||
|
context "golang.org/x/net/context"
|
||
|
grpc "google.golang.org/grpc"
|
||
|
)
|
||
|
|
||
|
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||
|
|
||
|
// The Log4j level for job execution. When running an
// [Apache Hive](http://hive.apache.org/) job, Cloud
// Dataproc configures the Hive client to an equivalent verbosity level.
type LoggingConfig_Level int32

const (
	// Level is unspecified. Use default level for log4j.
	LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0
	// Use ALL level for log4j.
	LoggingConfig_ALL LoggingConfig_Level = 1
	// Use TRACE level for log4j.
	LoggingConfig_TRACE LoggingConfig_Level = 2
	// Use DEBUG level for log4j.
	LoggingConfig_DEBUG LoggingConfig_Level = 3
	// Use INFO level for log4j.
	LoggingConfig_INFO LoggingConfig_Level = 4
	// Use WARN level for log4j.
	LoggingConfig_WARN LoggingConfig_Level = 5
	// Use ERROR level for log4j.
	LoggingConfig_ERROR LoggingConfig_Level = 6
	// Use FATAL level for log4j.
	LoggingConfig_FATAL LoggingConfig_Level = 7
	// Turn off log4j.
	LoggingConfig_OFF LoggingConfig_Level = 8
)

// LoggingConfig_Level_name maps each enum value to its proto name; used by String().
var LoggingConfig_Level_name = map[int32]string{
	0: "LEVEL_UNSPECIFIED",
	1: "ALL",
	2: "TRACE",
	3: "DEBUG",
	4: "INFO",
	5: "WARN",
	6: "ERROR",
	7: "FATAL",
	8: "OFF",
}

// LoggingConfig_Level_value is the inverse of LoggingConfig_Level_name.
var LoggingConfig_Level_value = map[string]int32{
	"LEVEL_UNSPECIFIED": 0,
	"ALL":               1,
	"TRACE":             2,
	"DEBUG":             3,
	"INFO":              4,
	"WARN":              5,
	"ERROR":             6,
	"FATAL":             7,
	"OFF":               8,
}

// String returns the proto name of the enum value (falls back to the numeric
// form for unknown values, per proto.EnumName).
func (x LoggingConfig_Level) String() string {
	return proto.EnumName(LoggingConfig_Level_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and the path of this
// enum within it (message 0, enum 0).
func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{0, 0}
}
|
||
|
|
||
|
// The job state.
type JobStatus_State int32

const (
	// The job state is unknown.
	JobStatus_STATE_UNSPECIFIED JobStatus_State = 0
	// The job is pending; it has been submitted, but is not yet running.
	JobStatus_PENDING JobStatus_State = 1
	// Job has been received by the service and completed initial setup;
	// it will soon be submitted to the cluster.
	JobStatus_SETUP_DONE JobStatus_State = 8
	// The job is running on the cluster.
	JobStatus_RUNNING JobStatus_State = 2
	// A CancelJob request has been received, but is pending.
	JobStatus_CANCEL_PENDING JobStatus_State = 3
	// Transient in-flight resources have been canceled, and the request to
	// cancel the running job has been issued to the cluster.
	JobStatus_CANCEL_STARTED JobStatus_State = 7
	// The job cancellation was successful.
	JobStatus_CANCELLED JobStatus_State = 4
	// The job has completed successfully.
	JobStatus_DONE JobStatus_State = 5
	// The job has completed, but encountered an error.
	JobStatus_ERROR JobStatus_State = 6
	// Job attempt has failed. The detail field contains failure details for
	// this attempt.
	//
	// Applies to restartable jobs only.
	JobStatus_ATTEMPT_FAILURE JobStatus_State = 9
)

// JobStatus_State_name maps each enum value to its proto name; used by String().
// Note the numeric values are not contiguous with declaration order (wire
// values were assigned as states were added to the proto).
var JobStatus_State_name = map[int32]string{
	0: "STATE_UNSPECIFIED",
	1: "PENDING",
	8: "SETUP_DONE",
	2: "RUNNING",
	3: "CANCEL_PENDING",
	7: "CANCEL_STARTED",
	4: "CANCELLED",
	5: "DONE",
	6: "ERROR",
	9: "ATTEMPT_FAILURE",
}

// JobStatus_State_value is the inverse of JobStatus_State_name.
var JobStatus_State_value = map[string]int32{
	"STATE_UNSPECIFIED": 0,
	"PENDING":           1,
	"SETUP_DONE":        8,
	"RUNNING":           2,
	"CANCEL_PENDING":    3,
	"CANCEL_STARTED":    7,
	"CANCELLED":         4,
	"DONE":              5,
	"ERROR":             6,
	"ATTEMPT_FAILURE":   9,
}

// String returns the proto name of the enum value.
func (x JobStatus_State) String() string {
	return proto.EnumName(JobStatus_State_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and the path of this
// enum within it (message 9, enum 0).
func (JobStatus_State) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{9, 0}
}
|
||
|
|
||
|
// JobStatus_Substate refines a JobStatus_State with agent-reported detail.
type JobStatus_Substate int32

const (
	JobStatus_UNSPECIFIED JobStatus_Substate = 0
	// The Job is submitted to the agent.
	//
	// Applies to RUNNING state.
	JobStatus_SUBMITTED JobStatus_Substate = 1
	// The Job has been received and is awaiting execution (it may be waiting
	// for a condition to be met). See the "details" field for the reason for
	// the delay.
	//
	// Applies to RUNNING state.
	JobStatus_QUEUED JobStatus_Substate = 2
	// The agent-reported status is out of date, which may be caused by a
	// loss of communication between the agent and Cloud Dataproc. If the
	// agent does not send a timely update, the job will fail.
	//
	// Applies to RUNNING state.
	JobStatus_STALE_STATUS JobStatus_Substate = 3
)

// JobStatus_Substate_name maps each enum value to its proto name; used by String().
var JobStatus_Substate_name = map[int32]string{
	0: "UNSPECIFIED",
	1: "SUBMITTED",
	2: "QUEUED",
	3: "STALE_STATUS",
}

// JobStatus_Substate_value is the inverse of JobStatus_Substate_name.
var JobStatus_Substate_value = map[string]int32{
	"UNSPECIFIED":  0,
	"SUBMITTED":    1,
	"QUEUED":       2,
	"STALE_STATUS": 3,
}

// String returns the proto name of the enum value.
func (x JobStatus_Substate) String() string {
	return proto.EnumName(JobStatus_Substate_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and the path of this
// enum within it (message 9, enum 1).
func (JobStatus_Substate) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{9, 1}
}
|
||
|
|
||
|
// The application state, corresponding to
// <code>YarnProtos.YarnApplicationStateProto</code>.
type YarnApplication_State int32

const (
	// Status is unspecified.
	YarnApplication_STATE_UNSPECIFIED YarnApplication_State = 0
	// Status is NEW.
	YarnApplication_NEW YarnApplication_State = 1
	// Status is NEW_SAVING.
	YarnApplication_NEW_SAVING YarnApplication_State = 2
	// Status is SUBMITTED.
	YarnApplication_SUBMITTED YarnApplication_State = 3
	// Status is ACCEPTED.
	YarnApplication_ACCEPTED YarnApplication_State = 4
	// Status is RUNNING.
	YarnApplication_RUNNING YarnApplication_State = 5
	// Status is FINISHED.
	YarnApplication_FINISHED YarnApplication_State = 6
	// Status is FAILED.
	YarnApplication_FAILED YarnApplication_State = 7
	// Status is KILLED.
	YarnApplication_KILLED YarnApplication_State = 8
)

// YarnApplication_State_name maps each enum value to its proto name; used by String().
var YarnApplication_State_name = map[int32]string{
	0: "STATE_UNSPECIFIED",
	1: "NEW",
	2: "NEW_SAVING",
	3: "SUBMITTED",
	4: "ACCEPTED",
	5: "RUNNING",
	6: "FINISHED",
	7: "FAILED",
	8: "KILLED",
}

// YarnApplication_State_value is the inverse of YarnApplication_State_name.
var YarnApplication_State_value = map[string]int32{
	"STATE_UNSPECIFIED": 0,
	"NEW":               1,
	"NEW_SAVING":        2,
	"SUBMITTED":         3,
	"ACCEPTED":          4,
	"RUNNING":           5,
	"FINISHED":          6,
	"FAILED":            7,
	"KILLED":            8,
}

// String returns the proto name of the enum value.
func (x YarnApplication_State) String() string {
	return proto.EnumName(YarnApplication_State_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and the path of this
// enum within it (message 11, enum 0).
func (YarnApplication_State) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{11, 0}
}
|
||
|
|
||
|
// A matcher that specifies categories of job states.
type ListJobsRequest_JobStateMatcher int32

const (
	// Match all jobs, regardless of state.
	ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0
	// Only match jobs in non-terminal states: PENDING, RUNNING, or
	// CANCEL_PENDING.
	ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1
	// Only match jobs in terminal states: CANCELLED, DONE, or ERROR.
	ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2
)

// ListJobsRequest_JobStateMatcher_name maps each enum value to its proto name.
var ListJobsRequest_JobStateMatcher_name = map[int32]string{
	0: "ALL",
	1: "ACTIVE",
	2: "NON_ACTIVE",
}

// ListJobsRequest_JobStateMatcher_value is the inverse of the _name map.
var ListJobsRequest_JobStateMatcher_value = map[string]int32{
	"ALL":        0,
	"ACTIVE":     1,
	"NON_ACTIVE": 2,
}

// String returns the proto name of the enum value.
func (x ListJobsRequest_JobStateMatcher) String() string {
	return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and the path of this
// enum within it (message 16, enum 0).
func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{16, 0}
}
|
||
|
|
||
|
// The runtime logging config of the job.
type LoggingConfig struct {
	// The per-package log levels for the driver. This may include
	// "root" package name to configure rootLogger.
	// Examples:
	//   'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	DriverLogLevels map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels,proto3" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.cloud.dataproc.v1.LoggingConfig_Level"`
	// Internal fields used by the proto runtime; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *LoggingConfig) Reset()         { *m = LoggingConfig{} }
// String renders the message in the compact proto text format.
func (m *LoggingConfig) String() string { return proto.CompactTextString(m) }
// ProtoMessage marks the type as a protobuf message.
func (*LoggingConfig) ProtoMessage()    {}
// Descriptor returns the compressed file descriptor and the index of this
// message within it.
func (*LoggingConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{0}
}

// The XXX_* methods below forward to the table-driven implementation in the
// proto package via xxx_messageInfo_LoggingConfig.
func (m *LoggingConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_LoggingConfig.Unmarshal(m, b)
}
func (m *LoggingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_LoggingConfig.Marshal(b, m, deterministic)
}
func (dst *LoggingConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_LoggingConfig.Merge(dst, src)
}
func (m *LoggingConfig) XXX_Size() int {
	return xxx_messageInfo_LoggingConfig.Size(m)
}
func (m *LoggingConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_LoggingConfig.DiscardUnknown(m)
}

var xxx_messageInfo_LoggingConfig proto.InternalMessageInfo

// GetDriverLogLevels returns the per-package log-level map, or nil when the
// receiver is nil (nil-receiver-safe, as for all generated getters).
func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level {
	if m != nil {
		return m.DriverLogLevels
	}
	return nil
}
|
||
|
|
||
|
// A Cloud Dataproc job for running
// [Apache Hadoop MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
// jobs on [Apache Hadoop YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
type HadoopJob struct {
	// Required. Indicates the location of the driver's main class. Specify
	// either the jar file that contains the main class or the main class name.
	// To specify both, add the jar file to `jar_file_uris`, and then specify
	// the main class name in this property.
	//
	// Types that are valid to be assigned to Driver:
	// *HadoopJob_MainJarFileUri
	// *HadoopJob_MainClass
	Driver isHadoopJob_Driver `protobuf_oneof:"driver"`
	// Optional. The arguments to pass to the driver. Do not
	// include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as job
	// properties, since a collision may occur that causes an incorrect job
	// submission.
	Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
	// Optional. Jar file URIs to add to the CLASSPATHs of the
	// Hadoop driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied
	// to the working directory of Hadoop drivers and distributed tasks. Useful
	// for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
	// Optional. HCFS URIs of archives to be extracted in the working directory of
	// Hadoop drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, or .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
	// Optional. A mapping of property names to values, used to configure Hadoop.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site and
	// classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// Internal fields used by the proto runtime; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *HadoopJob) Reset()         { *m = HadoopJob{} }
// String renders the message in the compact proto text format.
func (m *HadoopJob) String() string { return proto.CompactTextString(m) }
// ProtoMessage marks the type as a protobuf message.
func (*HadoopJob) ProtoMessage()    {}
// Descriptor returns the compressed file descriptor and the index of this
// message within it.
func (*HadoopJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{1}
}

// The XXX_* methods below forward to the table-driven implementation in the
// proto package via xxx_messageInfo_HadoopJob.
func (m *HadoopJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_HadoopJob.Unmarshal(m, b)
}
func (m *HadoopJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_HadoopJob.Marshal(b, m, deterministic)
}
func (dst *HadoopJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_HadoopJob.Merge(dst, src)
}
func (m *HadoopJob) XXX_Size() int {
	return xxx_messageInfo_HadoopJob.Size(m)
}
func (m *HadoopJob) XXX_DiscardUnknown() {
	xxx_messageInfo_HadoopJob.DiscardUnknown(m)
}

var xxx_messageInfo_HadoopJob proto.InternalMessageInfo

// isHadoopJob_Driver is the sealed interface implemented by the wrapper types
// of the "driver" oneof.
type isHadoopJob_Driver interface {
	isHadoopJob_Driver()
}

// HadoopJob_MainJarFileUri wraps field 1 of the "driver" oneof.
type HadoopJob_MainJarFileUri struct {
	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
}

// HadoopJob_MainClass wraps field 2 of the "driver" oneof.
type HadoopJob_MainClass struct {
	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
}

func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {}

func (*HadoopJob_MainClass) isHadoopJob_Driver() {}

// GetDriver returns whichever oneof wrapper is set, or nil.
func (m *HadoopJob) GetDriver() isHadoopJob_Driver {
	if m != nil {
		return m.Driver
	}
	return nil
}

// GetMainJarFileUri returns the main jar URI when that oneof case is set,
// otherwise the empty string.
func (m *HadoopJob) GetMainJarFileUri() string {
	if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok {
		return x.MainJarFileUri
	}
	return ""
}

// GetMainClass returns the main class name when that oneof case is set,
// otherwise the empty string.
func (m *HadoopJob) GetMainClass() string {
	if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok {
		return x.MainClass
	}
	return ""
}

// GetArgs returns the driver arguments, or nil for a nil receiver.
func (m *HadoopJob) GetArgs() []string {
	if m != nil {
		return m.Args
	}
	return nil
}

// GetJarFileUris returns the jar file URIs, or nil for a nil receiver.
func (m *HadoopJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

// GetFileUris returns the file URIs, or nil for a nil receiver.
func (m *HadoopJob) GetFileUris() []string {
	if m != nil {
		return m.FileUris
	}
	return nil
}

// GetArchiveUris returns the archive URIs, or nil for a nil receiver.
func (m *HadoopJob) GetArchiveUris() []string {
	if m != nil {
		return m.ArchiveUris
	}
	return nil
}

// GetProperties returns the Hadoop property map, or nil for a nil receiver.
func (m *HadoopJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

// GetLoggingConfig returns the runtime log config, or nil for a nil receiver.
func (m *HadoopJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*HadoopJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _HadoopJob_OneofMarshaler, _HadoopJob_OneofUnmarshaler, _HadoopJob_OneofSizer, []interface{}{
		(*HadoopJob_MainJarFileUri)(nil),
		(*HadoopJob_MainClass)(nil),
	}
}

// _HadoopJob_OneofMarshaler encodes whichever "driver" case is set.
func _HadoopJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*HadoopJob)
	// driver
	switch x := m.Driver.(type) {
	case *HadoopJob_MainJarFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainJarFileUri)
	case *HadoopJob_MainClass:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainClass)
	case nil:
	default:
		return fmt.Errorf("HadoopJob.Driver has unexpected type %T", x)
	}
	return nil
}

// _HadoopJob_OneofUnmarshaler decodes a "driver" field by tag; it returns
// (false, nil) for tags it does not own so the caller keeps scanning.
func _HadoopJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*HadoopJob)
	switch tag {
	case 1: // driver.main_jar_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &HadoopJob_MainJarFileUri{x}
		return true, err
	case 2: // driver.main_class
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &HadoopJob_MainClass{x}
		return true, err
	default:
		return false, nil
	}
}

// _HadoopJob_OneofSizer reports the encoded size of the set "driver" case.
func _HadoopJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*HadoopJob)
	// driver
	switch x := m.Driver.(type) {
	case *HadoopJob_MainJarFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
		n += len(x.MainJarFileUri)
	case *HadoopJob_MainClass:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainClass)))
		n += len(x.MainClass)
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
|
||
|
|
||
|
// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/)
// applications on YARN.
type SparkJob struct {
	// Required. The specification of the main method to call to drive the job.
	// Specify either the jar file that contains the main class or the main class
	// name. To pass both a main jar and a main class in that jar, add the jar to
	// `CommonJob.jar_file_uris`, and then specify the main class name in `main_class`.
	//
	// Types that are valid to be assigned to Driver:
	// *SparkJob_MainJarFileUri
	// *SparkJob_MainClass
	Driver isSparkJob_Driver `protobuf_oneof:"driver"`
	// Optional. The arguments to pass to the driver. Do not include arguments,
	// such as `--conf`, that can be set as job properties, since a collision may
	// occur that causes an incorrect job submission.
	Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
	// Spark driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. HCFS URIs of files to be copied to the working directory of
	// Spark drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
	// Optional. HCFS URIs of archives to be extracted in the working directory
	// of Spark drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
	// Optional. A mapping of property names to values, used to configure Spark.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in
	// /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// Internal fields used by the proto runtime; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *SparkJob) Reset()         { *m = SparkJob{} }
// String renders the message in the compact proto text format.
func (m *SparkJob) String() string { return proto.CompactTextString(m) }
// ProtoMessage marks the type as a protobuf message.
func (*SparkJob) ProtoMessage()    {}
// Descriptor returns the compressed file descriptor and the index of this
// message within it.
func (*SparkJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{2}
}

// The XXX_* methods below forward to the table-driven implementation in the
// proto package via xxx_messageInfo_SparkJob.
func (m *SparkJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_SparkJob.Unmarshal(m, b)
}
func (m *SparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_SparkJob.Marshal(b, m, deterministic)
}
func (dst *SparkJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SparkJob.Merge(dst, src)
}
func (m *SparkJob) XXX_Size() int {
	return xxx_messageInfo_SparkJob.Size(m)
}
func (m *SparkJob) XXX_DiscardUnknown() {
	xxx_messageInfo_SparkJob.DiscardUnknown(m)
}

var xxx_messageInfo_SparkJob proto.InternalMessageInfo

// isSparkJob_Driver is the sealed interface implemented by the wrapper types
// of the "driver" oneof.
type isSparkJob_Driver interface {
	isSparkJob_Driver()
}

// SparkJob_MainJarFileUri wraps field 1 of the "driver" oneof.
type SparkJob_MainJarFileUri struct {
	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
}

// SparkJob_MainClass wraps field 2 of the "driver" oneof.
type SparkJob_MainClass struct {
	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
}

func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {}

func (*SparkJob_MainClass) isSparkJob_Driver() {}

// GetDriver returns whichever oneof wrapper is set, or nil.
func (m *SparkJob) GetDriver() isSparkJob_Driver {
	if m != nil {
		return m.Driver
	}
	return nil
}

// GetMainJarFileUri returns the main jar URI when that oneof case is set,
// otherwise the empty string.
func (m *SparkJob) GetMainJarFileUri() string {
	if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok {
		return x.MainJarFileUri
	}
	return ""
}

// GetMainClass returns the main class name when that oneof case is set,
// otherwise the empty string.
func (m *SparkJob) GetMainClass() string {
	if x, ok := m.GetDriver().(*SparkJob_MainClass); ok {
		return x.MainClass
	}
	return ""
}

// GetArgs returns the driver arguments, or nil for a nil receiver.
func (m *SparkJob) GetArgs() []string {
	if m != nil {
		return m.Args
	}
	return nil
}

// GetJarFileUris returns the jar file URIs, or nil for a nil receiver.
func (m *SparkJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

// GetFileUris returns the file URIs, or nil for a nil receiver.
func (m *SparkJob) GetFileUris() []string {
	if m != nil {
		return m.FileUris
	}
	return nil
}

// GetArchiveUris returns the archive URIs, or nil for a nil receiver.
func (m *SparkJob) GetArchiveUris() []string {
	if m != nil {
		return m.ArchiveUris
	}
	return nil
}

// GetProperties returns the Spark property map, or nil for a nil receiver.
func (m *SparkJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

// GetLoggingConfig returns the runtime log config, or nil for a nil receiver.
func (m *SparkJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*SparkJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _SparkJob_OneofMarshaler, _SparkJob_OneofUnmarshaler, _SparkJob_OneofSizer, []interface{}{
		(*SparkJob_MainJarFileUri)(nil),
		(*SparkJob_MainClass)(nil),
	}
}

// _SparkJob_OneofMarshaler encodes whichever "driver" case is set.
func _SparkJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*SparkJob)
	// driver
	switch x := m.Driver.(type) {
	case *SparkJob_MainJarFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainJarFileUri)
	case *SparkJob_MainClass:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainClass)
	case nil:
	default:
		return fmt.Errorf("SparkJob.Driver has unexpected type %T", x)
	}
	return nil
}

// _SparkJob_OneofUnmarshaler decodes a "driver" field by tag; it returns
// (false, nil) for tags it does not own so the caller keeps scanning.
func _SparkJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*SparkJob)
	switch tag {
	case 1: // driver.main_jar_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &SparkJob_MainJarFileUri{x}
		return true, err
	case 2: // driver.main_class
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &SparkJob_MainClass{x}
		return true, err
	default:
		return false, nil
	}
}

// _SparkJob_OneofSizer reports the encoded size of the set "driver" case.
func _SparkJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*SparkJob)
	// driver
	switch x := m.Driver.(type) {
	case *SparkJob_MainJarFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
		n += len(x.MainJarFileUri)
	case *SparkJob_MainClass:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainClass)))
		n += len(x.MainClass)
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
|
||
|
|
||
|
// A Cloud Dataproc job for running
// [Apache PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
// applications on YARN.
type PySparkJob struct {
	// Required. The HCFS URI of the main Python file to use as the driver. Must
	// be a .py file.
	MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri,proto3" json:"main_python_file_uri,omitempty"`
	// Optional. The arguments to pass to the driver. Do not include arguments,
	// such as `--conf`, that can be set as job properties, since a collision may
	// occur that causes an incorrect job submission.
	Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"`
	// Optional. HCFS file URIs of Python files to pass to the PySpark
	// framework. Supported file types: .py, .egg, and .zip.
	PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris,proto3" json:"python_file_uris,omitempty"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
	// Python driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. HCFS URIs of files to be copied to the working directory of
	// Python drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
	// Optional. HCFS URIs of archives to be extracted in the working directory of
	// .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
	// Optional. A mapping of property names to values, used to configure PySpark.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in
	// /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// Internal fields used by the proto runtime; not part of the message schema.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *PySparkJob) Reset()         { *m = PySparkJob{} }
// String renders the message in the compact proto text format.
func (m *PySparkJob) String() string { return proto.CompactTextString(m) }
// ProtoMessage marks the type as a protobuf message.
func (*PySparkJob) ProtoMessage()    {}
// Descriptor returns the compressed file descriptor and the index of this
// message within it.
func (*PySparkJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{3}
}

// The XXX_* methods below forward to the table-driven implementation in the
// proto package via xxx_messageInfo_PySparkJob.
func (m *PySparkJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_PySparkJob.Unmarshal(m, b)
}
func (m *PySparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_PySparkJob.Marshal(b, m, deterministic)
}
func (dst *PySparkJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PySparkJob.Merge(dst, src)
}
func (m *PySparkJob) XXX_Size() int {
	return xxx_messageInfo_PySparkJob.Size(m)
}
func (m *PySparkJob) XXX_DiscardUnknown() {
	xxx_messageInfo_PySparkJob.DiscardUnknown(m)
}

var xxx_messageInfo_PySparkJob proto.InternalMessageInfo

// GetMainPythonFileUri returns the main Python file URI, or "" for a nil receiver.
func (m *PySparkJob) GetMainPythonFileUri() string {
	if m != nil {
		return m.MainPythonFileUri
	}
	return ""
}

// GetArgs returns the driver arguments, or nil for a nil receiver.
func (m *PySparkJob) GetArgs() []string {
	if m != nil {
		return m.Args
	}
	return nil
}

// GetPythonFileUris returns the Python file URIs, or nil for a nil receiver.
func (m *PySparkJob) GetPythonFileUris() []string {
	if m != nil {
		return m.PythonFileUris
	}
	return nil
}

// GetJarFileUris returns the jar file URIs, or nil for a nil receiver.
func (m *PySparkJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

// GetFileUris returns the file URIs, or nil for a nil receiver.
func (m *PySparkJob) GetFileUris() []string {
	if m != nil {
		return m.FileUris
	}
	return nil
}

// GetArchiveUris returns the archive URIs, or nil for a nil receiver.
func (m *PySparkJob) GetArchiveUris() []string {
	if m != nil {
		return m.ArchiveUris
	}
	return nil
}

// GetProperties returns the PySpark property map, or nil for a nil receiver.
func (m *PySparkJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

// GetLoggingConfig returns the runtime log config, or nil for a nil receiver.
func (m *PySparkJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}
|
||
|
|
||
|
// A list of queries to run on a cluster.
type QueryList struct {
	// Required. The queries to execute. You do not need to terminate a query
	// with a semicolon. Multiple queries can be specified in one string
	// by separating each with a semicolon. Here is an example of an Cloud
	// Dataproc API snippet that uses a QueryList to specify a HiveJob:
	//
	//     "hiveJob": {
	//       "queryList": {
	//         "queries": [
	//           "query1",
	//           "query2",
	//           "query3;query4",
	//         ]
	//       }
	//     }
	Queries []string `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for QueryList.
func (m *QueryList) Reset()         { *m = QueryList{} }
func (m *QueryList) String() string { return proto.CompactTextString(m) }
func (*QueryList) ProtoMessage()    {}
func (*QueryList) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{4}
}
func (m *QueryList) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_QueryList.Unmarshal(m, b)
}
func (m *QueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_QueryList.Marshal(b, m, deterministic)
}
func (dst *QueryList) XXX_Merge(src proto.Message) {
	xxx_messageInfo_QueryList.Merge(dst, src)
}
func (m *QueryList) XXX_Size() int {
	return xxx_messageInfo_QueryList.Size(m)
}
func (m *QueryList) XXX_DiscardUnknown() {
	xxx_messageInfo_QueryList.DiscardUnknown(m)
}

var xxx_messageInfo_QueryList proto.InternalMessageInfo

// GetQueries is nil-receiver safe and returns nil for an unset message.
func (m *QueryList) GetQueries() []string {
	if m != nil {
		return m.Queries
	}
	return nil
}
|
||
|
|
||
|
// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/)
// queries on YARN.
type HiveJob struct {
	// Required. The sequence of Hive queries to execute, specified as either
	// an HCFS file URI or a list of queries.
	//
	// Types that are valid to be assigned to Queries:
	//	*HiveJob_QueryFileUri
	//	*HiveJob_QueryList
	Queries isHiveJob_Queries `protobuf_oneof:"queries"`
	// Optional. Whether to continue executing queries if a query fails.
	// The default value is `false`. Setting to `true` can be useful when executing
	// independent parallel queries.
	ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
	// Optional. Mapping of query variable names to values (equivalent to the
	// Hive command: `SET name="value";`).
	ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. A mapping of property names and values, used to configure Hive.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
	// /etc/hive/conf/hive-site.xml, and classes in user code.
	Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATH of the
	// Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
	// and UDFs.
	JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for HiveJob.
func (m *HiveJob) Reset()         { *m = HiveJob{} }
func (m *HiveJob) String() string { return proto.CompactTextString(m) }
func (*HiveJob) ProtoMessage()    {}
func (*HiveJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{5}
}
func (m *HiveJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_HiveJob.Unmarshal(m, b)
}
func (m *HiveJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_HiveJob.Marshal(b, m, deterministic)
}
func (dst *HiveJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_HiveJob.Merge(dst, src)
}
func (m *HiveJob) XXX_Size() int {
	return xxx_messageInfo_HiveJob.Size(m)
}
func (m *HiveJob) XXX_DiscardUnknown() {
	xxx_messageInfo_HiveJob.DiscardUnknown(m)
}

var xxx_messageInfo_HiveJob proto.InternalMessageInfo

// isHiveJob_Queries is the sealed interface implemented by the oneof
// wrapper types below; exactly one variant may be set on HiveJob.Queries.
type isHiveJob_Queries interface {
	isHiveJob_Queries()
}

// HiveJob_QueryFileUri wraps the query_file_uri oneof variant (field 1).
type HiveJob_QueryFileUri struct {
	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
}

// HiveJob_QueryList wraps the query_list oneof variant (field 2).
type HiveJob_QueryList struct {
	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
}

func (*HiveJob_QueryFileUri) isHiveJob_Queries() {}
func (*HiveJob_QueryList) isHiveJob_Queries()    {}

// GetQueries returns the raw oneof wrapper; use the typed getters below to
// extract a specific variant. Nil-receiver safe.
func (m *HiveJob) GetQueries() isHiveJob_Queries {
	if m != nil {
		return m.Queries
	}
	return nil
}

// GetQueryFileUri returns the query_file_uri variant, or "" if another
// variant (or none) is set.
func (m *HiveJob) GetQueryFileUri() string {
	if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok {
		return x.QueryFileUri
	}
	return ""
}

// GetQueryList returns the query_list variant, or nil if another variant
// (or none) is set.
func (m *HiveJob) GetQueryList() *QueryList {
	if x, ok := m.GetQueries().(*HiveJob_QueryList); ok {
		return x.QueryList
	}
	return nil
}

func (m *HiveJob) GetContinueOnFailure() bool {
	if m != nil {
		return m.ContinueOnFailure
	}
	return false
}

func (m *HiveJob) GetScriptVariables() map[string]string {
	if m != nil {
		return m.ScriptVariables
	}
	return nil
}

func (m *HiveJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *HiveJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}
|
||
|
|
||
|
// XXX_OneofFuncs is for the internal use of the proto package. It hands the
// runtime the marshal/unmarshal/size hooks for the "queries" oneof together
// with the concrete wrapper types.
func (*HiveJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _HiveJob_OneofMarshaler, _HiveJob_OneofUnmarshaler, _HiveJob_OneofSizer, []interface{}{
		(*HiveJob_QueryFileUri)(nil),
		(*HiveJob_QueryList)(nil),
	}
}

// _HiveJob_OneofMarshaler encodes whichever "queries" variant is set
// (field 1 as a string, field 2 as an embedded message); nil means unset.
func _HiveJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*HiveJob)
	// queries
	switch x := m.Queries.(type) {
	case *HiveJob_QueryFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.QueryFileUri)
	case *HiveJob_QueryList:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.QueryList); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("HiveJob.Queries has unexpected type %T", x)
	}
	return nil
}

// _HiveJob_OneofUnmarshaler decodes a "queries" field by tag; the first
// return reports whether the tag belonged to this oneof.
func _HiveJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*HiveJob)
	switch tag {
	case 1: // queries.query_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Queries = &HiveJob_QueryFileUri{x}
		return true, err
	case 2: // queries.query_list
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(QueryList)
		err := b.DecodeMessage(msg)
		m.Queries = &HiveJob_QueryList{msg}
		return true, err
	default:
		return false, nil
	}
}

// _HiveJob_OneofSizer reports the encoded byte size of the set "queries"
// variant, including its tag/wire byte and length varint.
func _HiveJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*HiveJob)
	// queries
	switch x := m.Queries.(type) {
	case *HiveJob_QueryFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
		n += len(x.QueryFileUri)
	case *HiveJob_QueryList:
		s := proto.Size(x.QueryList)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
|
||
|
|
||
|
// A Cloud Dataproc job for running [Apache Spark SQL](http://spark.apache.org/sql/)
// queries.
type SparkSqlJob struct {
	// Required. The sequence of Spark SQL queries to execute, specified as
	// either an HCFS file URI or as a list of queries.
	//
	// Types that are valid to be assigned to Queries:
	//	*SparkSqlJob_QueryFileUri
	//	*SparkSqlJob_QueryList
	Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"`
	// Optional. Mapping of query variable names to values (equivalent to the
	// Spark SQL command: SET `name="value";`).
	ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. A mapping of property names to values, used to configure
	// Spark SQL's SparkConf. Properties that conflict with values set by the
	// Cloud Dataproc API may be overwritten.
	Properties map[string]string `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
	JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for SparkSqlJob.
func (m *SparkSqlJob) Reset()         { *m = SparkSqlJob{} }
func (m *SparkSqlJob) String() string { return proto.CompactTextString(m) }
func (*SparkSqlJob) ProtoMessage()    {}
func (*SparkSqlJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{6}
}
func (m *SparkSqlJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_SparkSqlJob.Unmarshal(m, b)
}
func (m *SparkSqlJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_SparkSqlJob.Marshal(b, m, deterministic)
}
func (dst *SparkSqlJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SparkSqlJob.Merge(dst, src)
}
func (m *SparkSqlJob) XXX_Size() int {
	return xxx_messageInfo_SparkSqlJob.Size(m)
}
func (m *SparkSqlJob) XXX_DiscardUnknown() {
	xxx_messageInfo_SparkSqlJob.DiscardUnknown(m)
}

var xxx_messageInfo_SparkSqlJob proto.InternalMessageInfo

// isSparkSqlJob_Queries is the sealed interface implemented by the oneof
// wrapper types below; exactly one variant may be set on SparkSqlJob.Queries.
type isSparkSqlJob_Queries interface {
	isSparkSqlJob_Queries()
}

// SparkSqlJob_QueryFileUri wraps the query_file_uri oneof variant (field 1).
type SparkSqlJob_QueryFileUri struct {
	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
}

// SparkSqlJob_QueryList wraps the query_list oneof variant (field 2).
type SparkSqlJob_QueryList struct {
	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
}

func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {}
func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries()    {}

// GetQueries returns the raw oneof wrapper; use the typed getters below to
// extract a specific variant. Nil-receiver safe.
func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries {
	if m != nil {
		return m.Queries
	}
	return nil
}

// GetQueryFileUri returns the query_file_uri variant, or "" if another
// variant (or none) is set.
func (m *SparkSqlJob) GetQueryFileUri() string {
	if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok {
		return x.QueryFileUri
	}
	return ""
}

// GetQueryList returns the query_list variant, or nil if another variant
// (or none) is set.
func (m *SparkSqlJob) GetQueryList() *QueryList {
	if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok {
		return x.QueryList
	}
	return nil
}

func (m *SparkSqlJob) GetScriptVariables() map[string]string {
	if m != nil {
		return m.ScriptVariables
	}
	return nil
}

func (m *SparkSqlJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *SparkSqlJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}
|
||
|
|
||
|
// XXX_OneofFuncs is for the internal use of the proto package. It hands the
// runtime the marshal/unmarshal/size hooks for the "queries" oneof together
// with the concrete wrapper types.
func (*SparkSqlJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _SparkSqlJob_OneofMarshaler, _SparkSqlJob_OneofUnmarshaler, _SparkSqlJob_OneofSizer, []interface{}{
		(*SparkSqlJob_QueryFileUri)(nil),
		(*SparkSqlJob_QueryList)(nil),
	}
}

// _SparkSqlJob_OneofMarshaler encodes whichever "queries" variant is set
// (field 1 as a string, field 2 as an embedded message); nil means unset.
func _SparkSqlJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*SparkSqlJob)
	// queries
	switch x := m.Queries.(type) {
	case *SparkSqlJob_QueryFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.QueryFileUri)
	case *SparkSqlJob_QueryList:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.QueryList); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("SparkSqlJob.Queries has unexpected type %T", x)
	}
	return nil
}

// _SparkSqlJob_OneofUnmarshaler decodes a "queries" field by tag; the first
// return reports whether the tag belonged to this oneof.
func _SparkSqlJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*SparkSqlJob)
	switch tag {
	case 1: // queries.query_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Queries = &SparkSqlJob_QueryFileUri{x}
		return true, err
	case 2: // queries.query_list
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(QueryList)
		err := b.DecodeMessage(msg)
		m.Queries = &SparkSqlJob_QueryList{msg}
		return true, err
	default:
		return false, nil
	}
}

// _SparkSqlJob_OneofSizer reports the encoded byte size of the set
// "queries" variant, including its tag/wire byte and length varint.
func _SparkSqlJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*SparkSqlJob)
	// queries
	switch x := m.Queries.(type) {
	case *SparkSqlJob_QueryFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
		n += len(x.QueryFileUri)
	case *SparkSqlJob_QueryList:
		s := proto.Size(x.QueryList)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
|
||
|
|
||
|
// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/)
// queries on YARN.
type PigJob struct {
	// Required. The sequence of Pig queries to execute, specified as an HCFS
	// file URI or a list of queries.
	//
	// Types that are valid to be assigned to Queries:
	//	*PigJob_QueryFileUri
	//	*PigJob_QueryList
	Queries isPigJob_Queries `protobuf_oneof:"queries"`
	// Optional. Whether to continue executing queries if a query fails.
	// The default value is `false`. Setting to `true` can be useful when executing
	// independent parallel queries.
	ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
	// Optional. Mapping of query variable names to values (equivalent to the Pig
	// command: `name=[value]`).
	ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. A mapping of property names to values, used to configure Pig.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
	// /etc/pig/conf/pig.properties, and classes in user code.
	Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATH of
	// the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
	JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for PigJob.
func (m *PigJob) Reset()         { *m = PigJob{} }
func (m *PigJob) String() string { return proto.CompactTextString(m) }
func (*PigJob) ProtoMessage()    {}
func (*PigJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{7}
}
func (m *PigJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_PigJob.Unmarshal(m, b)
}
func (m *PigJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_PigJob.Marshal(b, m, deterministic)
}
func (dst *PigJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PigJob.Merge(dst, src)
}
func (m *PigJob) XXX_Size() int {
	return xxx_messageInfo_PigJob.Size(m)
}
func (m *PigJob) XXX_DiscardUnknown() {
	xxx_messageInfo_PigJob.DiscardUnknown(m)
}

var xxx_messageInfo_PigJob proto.InternalMessageInfo

// isPigJob_Queries is the sealed interface implemented by the oneof
// wrapper types below; exactly one variant may be set on PigJob.Queries.
type isPigJob_Queries interface {
	isPigJob_Queries()
}

// PigJob_QueryFileUri wraps the query_file_uri oneof variant (field 1).
type PigJob_QueryFileUri struct {
	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
}

// PigJob_QueryList wraps the query_list oneof variant (field 2).
type PigJob_QueryList struct {
	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
}

func (*PigJob_QueryFileUri) isPigJob_Queries() {}
func (*PigJob_QueryList) isPigJob_Queries()    {}

// GetQueries returns the raw oneof wrapper; use the typed getters below to
// extract a specific variant. Nil-receiver safe.
func (m *PigJob) GetQueries() isPigJob_Queries {
	if m != nil {
		return m.Queries
	}
	return nil
}

// GetQueryFileUri returns the query_file_uri variant, or "" if another
// variant (or none) is set.
func (m *PigJob) GetQueryFileUri() string {
	if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok {
		return x.QueryFileUri
	}
	return ""
}

// GetQueryList returns the query_list variant, or nil if another variant
// (or none) is set.
func (m *PigJob) GetQueryList() *QueryList {
	if x, ok := m.GetQueries().(*PigJob_QueryList); ok {
		return x.QueryList
	}
	return nil
}

func (m *PigJob) GetContinueOnFailure() bool {
	if m != nil {
		return m.ContinueOnFailure
	}
	return false
}

func (m *PigJob) GetScriptVariables() map[string]string {
	if m != nil {
		return m.ScriptVariables
	}
	return nil
}

func (m *PigJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *PigJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *PigJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}
|
||
|
|
||
|
// XXX_OneofFuncs is for the internal use of the proto package. It hands the
// runtime the marshal/unmarshal/size hooks for the "queries" oneof together
// with the concrete wrapper types.
func (*PigJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _PigJob_OneofMarshaler, _PigJob_OneofUnmarshaler, _PigJob_OneofSizer, []interface{}{
		(*PigJob_QueryFileUri)(nil),
		(*PigJob_QueryList)(nil),
	}
}

// _PigJob_OneofMarshaler encodes whichever "queries" variant is set
// (field 1 as a string, field 2 as an embedded message); nil means unset.
func _PigJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*PigJob)
	// queries
	switch x := m.Queries.(type) {
	case *PigJob_QueryFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.QueryFileUri)
	case *PigJob_QueryList:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.QueryList); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("PigJob.Queries has unexpected type %T", x)
	}
	return nil
}

// _PigJob_OneofUnmarshaler decodes a "queries" field by tag; the first
// return reports whether the tag belonged to this oneof.
func _PigJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*PigJob)
	switch tag {
	case 1: // queries.query_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Queries = &PigJob_QueryFileUri{x}
		return true, err
	case 2: // queries.query_list
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(QueryList)
		err := b.DecodeMessage(msg)
		m.Queries = &PigJob_QueryList{msg}
		return true, err
	default:
		return false, nil
	}
}

// _PigJob_OneofSizer reports the encoded byte size of the set "queries"
// variant, including its tag/wire byte and length varint.
func _PigJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*PigJob)
	// queries
	switch x := m.Queries.(type) {
	case *PigJob_QueryFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
		n += len(x.QueryFileUri)
	case *PigJob_QueryList:
		s := proto.Size(x.QueryList)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
|
||
|
|
||
|
// Cloud Dataproc job config.
type JobPlacement struct {
	// Required. The name of the cluster where the job will be submitted.
	ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
	// Output-only. A cluster UUID generated by the Cloud Dataproc service when
	// the job is submitted.
	ClusterUuid string `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for JobPlacement.
func (m *JobPlacement) Reset()         { *m = JobPlacement{} }
func (m *JobPlacement) String() string { return proto.CompactTextString(m) }
func (*JobPlacement) ProtoMessage()    {}
func (*JobPlacement) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{8}
}
func (m *JobPlacement) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_JobPlacement.Unmarshal(m, b)
}
func (m *JobPlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_JobPlacement.Marshal(b, m, deterministic)
}
func (dst *JobPlacement) XXX_Merge(src proto.Message) {
	xxx_messageInfo_JobPlacement.Merge(dst, src)
}
func (m *JobPlacement) XXX_Size() int {
	return xxx_messageInfo_JobPlacement.Size(m)
}
func (m *JobPlacement) XXX_DiscardUnknown() {
	xxx_messageInfo_JobPlacement.DiscardUnknown(m)
}

var xxx_messageInfo_JobPlacement proto.InternalMessageInfo

// Nil-receiver-safe getters.
func (m *JobPlacement) GetClusterName() string {
	if m != nil {
		return m.ClusterName
	}
	return ""
}

func (m *JobPlacement) GetClusterUuid() string {
	if m != nil {
		return m.ClusterUuid
	}
	return ""
}
|
||
|
|
||
|
// Cloud Dataproc job status.
type JobStatus struct {
	// Output-only. A state message specifying the overall job state.
	State JobStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1.JobStatus_State" json:"state,omitempty"`
	// Output-only. Optional job state details, such as an error
	// description if the state is <code>ERROR</code>.
	Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"`
	// Output-only. The time when this state was entered.
	StateStartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"`
	// Output-only. Additional state information, which includes
	// status reported by the agent.
	Substate JobStatus_Substate `protobuf:"varint,7,opt,name=substate,proto3,enum=google.cloud.dataproc.v1.JobStatus_Substate" json:"substate,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for JobStatus.
func (m *JobStatus) Reset()         { *m = JobStatus{} }
func (m *JobStatus) String() string { return proto.CompactTextString(m) }
func (*JobStatus) ProtoMessage()    {}
func (*JobStatus) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{9}
}
func (m *JobStatus) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_JobStatus.Unmarshal(m, b)
}
func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic)
}
func (dst *JobStatus) XXX_Merge(src proto.Message) {
	xxx_messageInfo_JobStatus.Merge(dst, src)
}
func (m *JobStatus) XXX_Size() int {
	return xxx_messageInfo_JobStatus.Size(m)
}
func (m *JobStatus) XXX_DiscardUnknown() {
	xxx_messageInfo_JobStatus.DiscardUnknown(m)
}

var xxx_messageInfo_JobStatus proto.InternalMessageInfo

// Nil-receiver-safe getters; enum getters return the zero (UNSPECIFIED)
// value for an unset message.
func (m *JobStatus) GetState() JobStatus_State {
	if m != nil {
		return m.State
	}
	return JobStatus_STATE_UNSPECIFIED
}

func (m *JobStatus) GetDetails() string {
	if m != nil {
		return m.Details
	}
	return ""
}

func (m *JobStatus) GetStateStartTime() *timestamp.Timestamp {
	if m != nil {
		return m.StateStartTime
	}
	return nil
}

func (m *JobStatus) GetSubstate() JobStatus_Substate {
	if m != nil {
		return m.Substate
	}
	return JobStatus_UNSPECIFIED
}
|
||
|
|
||
|
// Encapsulates the full scoping used to reference a job.
type JobReference struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Optional. The job ID, which must be unique within the project. The job ID
	// is generated by the server upon job submission or provided by the user as a
	// means to perform retries without creating duplicate jobs. The ID must
	// contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
	// hyphens (-). The maximum length is 100 characters.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for JobReference.
func (m *JobReference) Reset()         { *m = JobReference{} }
func (m *JobReference) String() string { return proto.CompactTextString(m) }
func (*JobReference) ProtoMessage()    {}
func (*JobReference) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{10}
}
func (m *JobReference) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_JobReference.Unmarshal(m, b)
}
func (m *JobReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_JobReference.Marshal(b, m, deterministic)
}
func (dst *JobReference) XXX_Merge(src proto.Message) {
	xxx_messageInfo_JobReference.Merge(dst, src)
}
func (m *JobReference) XXX_Size() int {
	return xxx_messageInfo_JobReference.Size(m)
}
func (m *JobReference) XXX_DiscardUnknown() {
	xxx_messageInfo_JobReference.DiscardUnknown(m)
}

var xxx_messageInfo_JobReference proto.InternalMessageInfo

// Nil-receiver-safe getters.
func (m *JobReference) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *JobReference) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
|
||
|
|
||
|
// A YARN application created by a job. Application information is a subset of
// <code>org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto</code>.
//
// **Beta Feature**: This report is available for testing purposes only. It may
// be changed before final release.
type YarnApplication struct {
	// Required. The application name.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Required. The application state.
	State YarnApplication_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.dataproc.v1.YarnApplication_State" json:"state,omitempty"`
	// Required. The numerical progress of the application, from 1 to 100.
	Progress float32 `protobuf:"fixed32,3,opt,name=progress,proto3" json:"progress,omitempty"`
	// Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or
	// TimelineServer that provides application-specific information. The URL uses
	// the internal hostname, and requires a proxy server for resolution and,
	// possibly, access.
	TrackingUrl string `protobuf:"bytes,4,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"`
	// Internal proto-runtime bookkeeping fields; not part of the message.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard protoc-gen-go boilerplate for YarnApplication.
func (m *YarnApplication) Reset()         { *m = YarnApplication{} }
func (m *YarnApplication) String() string { return proto.CompactTextString(m) }
func (*YarnApplication) ProtoMessage()    {}
func (*YarnApplication) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_a8158969d454e928, []int{11}
}
func (m *YarnApplication) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_YarnApplication.Unmarshal(m, b)
}
func (m *YarnApplication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_YarnApplication.Marshal(b, m, deterministic)
}
func (dst *YarnApplication) XXX_Merge(src proto.Message) {
	xxx_messageInfo_YarnApplication.Merge(dst, src)
}
func (m *YarnApplication) XXX_Size() int {
	return xxx_messageInfo_YarnApplication.Size(m)
}
func (m *YarnApplication) XXX_DiscardUnknown() {
	xxx_messageInfo_YarnApplication.DiscardUnknown(m)
}

var xxx_messageInfo_YarnApplication proto.InternalMessageInfo

// GetName is nil-receiver safe and returns "" for an unset message.
func (m *YarnApplication) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
|
||
|
|
||
|
func (m *YarnApplication) GetState() YarnApplication_State {
|
||
|
if m != nil {
|
||
|
return m.State
|
||
|
}
|
||
|
return YarnApplication_STATE_UNSPECIFIED
|
||
|
}
|
||
|
|
||
|
func (m *YarnApplication) GetProgress() float32 {
|
||
|
if m != nil {
|
||
|
return m.Progress
|
||
|
}
|
||
|
return 0
|
||
|
}
|
||
|
|
||
|
func (m *YarnApplication) GetTrackingUrl() string {
|
||
|
if m != nil {
|
||
|
return m.TrackingUrl
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
// A Cloud Dataproc job resource.
|
||
|
type Job struct {
|
||
|
// Optional. The fully qualified reference to the job, which can be used to
|
||
|
// obtain the equivalent REST path of the job resource. If this property
|
||
|
// is not specified when a job is created, the server generates a
|
||
|
// <code>job_id</code>.
|
||
|
Reference *JobReference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"`
|
||
|
// Required. Job information, including how, when, and where to
|
||
|
// run the job.
|
||
|
Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"`
|
||
|
// Required. The application/framework-specific portion of the job.
|
||
|
//
|
||
|
// Types that are valid to be assigned to TypeJob:
|
||
|
// *Job_HadoopJob
|
||
|
// *Job_SparkJob
|
||
|
// *Job_PysparkJob
|
||
|
// *Job_HiveJob
|
||
|
// *Job_PigJob
|
||
|
// *Job_SparkSqlJob
|
||
|
TypeJob isJob_TypeJob `protobuf_oneof:"type_job"`
|
||
|
// Output-only. The job status. Additional application-specific
|
||
|
// status information may be contained in the <code>type_job</code>
|
||
|
// and <code>yarn_applications</code> fields.
|
||
|
Status *JobStatus `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"`
|
||
|
// Output-only. The previous job status.
|
||
|
StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"`
|
||
|
// Output-only. The collection of YARN applications spun up by this job.
|
||
|
//
|
||
|
// **Beta** Feature: This report is available for testing purposes only. It may
|
||
|
// be changed before final release.
|
||
|
YarnApplications []*YarnApplication `protobuf:"bytes,9,rep,name=yarn_applications,json=yarnApplications,proto3" json:"yarn_applications,omitempty"`
|
||
|
// Output-only. A URI pointing to the location of the stdout of the job's
|
||
|
// driver program.
|
||
|
DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri,proto3" json:"driver_output_resource_uri,omitempty"`
|
||
|
// Output-only. If present, the location of miscellaneous control files
|
||
|
// which may be used as part of job setup and handling. If not present,
|
||
|
// control files may be placed in the same location as `driver_output_uri`.
|
||
|
DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri,proto3" json:"driver_control_files_uri,omitempty"`
|
||
|
// Optional. The labels to associate with this job.
|
||
|
// Label **keys** must contain 1 to 63 characters, and must conform to
|
||
|
// [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
|
||
|
// Label **values** may be empty, but, if present, must contain 1 to 63
|
||
|
// characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
|
||
|
// No more than 32 labels can be associated with a job.
|
||
|
Labels map[string]string `protobuf:"bytes,18,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||
|
// Optional. Job scheduling configuration.
|
||
|
Scheduling *JobScheduling `protobuf:"bytes,20,opt,name=scheduling,proto3" json:"scheduling,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *Job) Reset() { *m = Job{} }
|
||
|
func (m *Job) String() string { return proto.CompactTextString(m) }
|
||
|
func (*Job) ProtoMessage() {}
|
||
|
func (*Job) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{12}
|
||
|
}
|
||
|
func (m *Job) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_Job.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_Job.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *Job) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_Job.Merge(dst, src)
|
||
|
}
|
||
|
func (m *Job) XXX_Size() int {
|
||
|
return xxx_messageInfo_Job.Size(m)
|
||
|
}
|
||
|
func (m *Job) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_Job.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_Job proto.InternalMessageInfo
|
||
|
|
||
|
type isJob_TypeJob interface {
|
||
|
isJob_TypeJob()
|
||
|
}
|
||
|
|
||
|
type Job_HadoopJob struct {
|
||
|
HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"`
|
||
|
}
|
||
|
type Job_SparkJob struct {
|
||
|
SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,proto3,oneof"`
|
||
|
}
|
||
|
type Job_PysparkJob struct {
|
||
|
PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"`
|
||
|
}
|
||
|
type Job_HiveJob struct {
|
||
|
HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,proto3,oneof"`
|
||
|
}
|
||
|
type Job_PigJob struct {
|
||
|
PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,proto3,oneof"`
|
||
|
}
|
||
|
type Job_SparkSqlJob struct {
|
||
|
SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"`
|
||
|
}
|
||
|
|
||
|
func (*Job_HadoopJob) isJob_TypeJob() {}
|
||
|
func (*Job_SparkJob) isJob_TypeJob() {}
|
||
|
func (*Job_PysparkJob) isJob_TypeJob() {}
|
||
|
func (*Job_HiveJob) isJob_TypeJob() {}
|
||
|
func (*Job_PigJob) isJob_TypeJob() {}
|
||
|
func (*Job_SparkSqlJob) isJob_TypeJob() {}
|
||
|
|
||
|
func (m *Job) GetTypeJob() isJob_TypeJob {
|
||
|
if m != nil {
|
||
|
return m.TypeJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetReference() *JobReference {
|
||
|
if m != nil {
|
||
|
return m.Reference
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetPlacement() *JobPlacement {
|
||
|
if m != nil {
|
||
|
return m.Placement
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetHadoopJob() *HadoopJob {
|
||
|
if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok {
|
||
|
return x.HadoopJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetSparkJob() *SparkJob {
|
||
|
if x, ok := m.GetTypeJob().(*Job_SparkJob); ok {
|
||
|
return x.SparkJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetPysparkJob() *PySparkJob {
|
||
|
if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok {
|
||
|
return x.PysparkJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetHiveJob() *HiveJob {
|
||
|
if x, ok := m.GetTypeJob().(*Job_HiveJob); ok {
|
||
|
return x.HiveJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetPigJob() *PigJob {
|
||
|
if x, ok := m.GetTypeJob().(*Job_PigJob); ok {
|
||
|
return x.PigJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetSparkSqlJob() *SparkSqlJob {
|
||
|
if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok {
|
||
|
return x.SparkSqlJob
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetStatus() *JobStatus {
|
||
|
if m != nil {
|
||
|
return m.Status
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetStatusHistory() []*JobStatus {
|
||
|
if m != nil {
|
||
|
return m.StatusHistory
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetYarnApplications() []*YarnApplication {
|
||
|
if m != nil {
|
||
|
return m.YarnApplications
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetDriverOutputResourceUri() string {
|
||
|
if m != nil {
|
||
|
return m.DriverOutputResourceUri
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetDriverControlFilesUri() string {
|
||
|
if m != nil {
|
||
|
return m.DriverControlFilesUri
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetLabels() map[string]string {
|
||
|
if m != nil {
|
||
|
return m.Labels
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *Job) GetScheduling() *JobScheduling {
|
||
|
if m != nil {
|
||
|
return m.Scheduling
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
// XXX_OneofFuncs is for the internal use of the proto package.
|
||
|
func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
|
||
|
return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{
|
||
|
(*Job_HadoopJob)(nil),
|
||
|
(*Job_SparkJob)(nil),
|
||
|
(*Job_PysparkJob)(nil),
|
||
|
(*Job_HiveJob)(nil),
|
||
|
(*Job_PigJob)(nil),
|
||
|
(*Job_SparkSqlJob)(nil),
|
||
|
}
|
||
|
}
|
||
|
|
||
|
func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
|
||
|
m := msg.(*Job)
|
||
|
// type_job
|
||
|
switch x := m.TypeJob.(type) {
|
||
|
case *Job_HadoopJob:
|
||
|
b.EncodeVarint(3<<3 | proto.WireBytes)
|
||
|
if err := b.EncodeMessage(x.HadoopJob); err != nil {
|
||
|
return err
|
||
|
}
|
||
|
case *Job_SparkJob:
|
||
|
b.EncodeVarint(4<<3 | proto.WireBytes)
|
||
|
if err := b.EncodeMessage(x.SparkJob); err != nil {
|
||
|
return err
|
||
|
}
|
||
|
case *Job_PysparkJob:
|
||
|
b.EncodeVarint(5<<3 | proto.WireBytes)
|
||
|
if err := b.EncodeMessage(x.PysparkJob); err != nil {
|
||
|
return err
|
||
|
}
|
||
|
case *Job_HiveJob:
|
||
|
b.EncodeVarint(6<<3 | proto.WireBytes)
|
||
|
if err := b.EncodeMessage(x.HiveJob); err != nil {
|
||
|
return err
|
||
|
}
|
||
|
case *Job_PigJob:
|
||
|
b.EncodeVarint(7<<3 | proto.WireBytes)
|
||
|
if err := b.EncodeMessage(x.PigJob); err != nil {
|
||
|
return err
|
||
|
}
|
||
|
case *Job_SparkSqlJob:
|
||
|
b.EncodeVarint(12<<3 | proto.WireBytes)
|
||
|
if err := b.EncodeMessage(x.SparkSqlJob); err != nil {
|
||
|
return err
|
||
|
}
|
||
|
case nil:
|
||
|
default:
|
||
|
return fmt.Errorf("Job.TypeJob has unexpected type %T", x)
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
|
||
|
m := msg.(*Job)
|
||
|
switch tag {
|
||
|
case 3: // type_job.hadoop_job
|
||
|
if wire != proto.WireBytes {
|
||
|
return true, proto.ErrInternalBadWireType
|
||
|
}
|
||
|
msg := new(HadoopJob)
|
||
|
err := b.DecodeMessage(msg)
|
||
|
m.TypeJob = &Job_HadoopJob{msg}
|
||
|
return true, err
|
||
|
case 4: // type_job.spark_job
|
||
|
if wire != proto.WireBytes {
|
||
|
return true, proto.ErrInternalBadWireType
|
||
|
}
|
||
|
msg := new(SparkJob)
|
||
|
err := b.DecodeMessage(msg)
|
||
|
m.TypeJob = &Job_SparkJob{msg}
|
||
|
return true, err
|
||
|
case 5: // type_job.pyspark_job
|
||
|
if wire != proto.WireBytes {
|
||
|
return true, proto.ErrInternalBadWireType
|
||
|
}
|
||
|
msg := new(PySparkJob)
|
||
|
err := b.DecodeMessage(msg)
|
||
|
m.TypeJob = &Job_PysparkJob{msg}
|
||
|
return true, err
|
||
|
case 6: // type_job.hive_job
|
||
|
if wire != proto.WireBytes {
|
||
|
return true, proto.ErrInternalBadWireType
|
||
|
}
|
||
|
msg := new(HiveJob)
|
||
|
err := b.DecodeMessage(msg)
|
||
|
m.TypeJob = &Job_HiveJob{msg}
|
||
|
return true, err
|
||
|
case 7: // type_job.pig_job
|
||
|
if wire != proto.WireBytes {
|
||
|
return true, proto.ErrInternalBadWireType
|
||
|
}
|
||
|
msg := new(PigJob)
|
||
|
err := b.DecodeMessage(msg)
|
||
|
m.TypeJob = &Job_PigJob{msg}
|
||
|
return true, err
|
||
|
case 12: // type_job.spark_sql_job
|
||
|
if wire != proto.WireBytes {
|
||
|
return true, proto.ErrInternalBadWireType
|
||
|
}
|
||
|
msg := new(SparkSqlJob)
|
||
|
err := b.DecodeMessage(msg)
|
||
|
m.TypeJob = &Job_SparkSqlJob{msg}
|
||
|
return true, err
|
||
|
default:
|
||
|
return false, nil
|
||
|
}
|
||
|
}
|
||
|
|
||
|
func _Job_OneofSizer(msg proto.Message) (n int) {
|
||
|
m := msg.(*Job)
|
||
|
// type_job
|
||
|
switch x := m.TypeJob.(type) {
|
||
|
case *Job_HadoopJob:
|
||
|
s := proto.Size(x.HadoopJob)
|
||
|
n += 1 // tag and wire
|
||
|
n += proto.SizeVarint(uint64(s))
|
||
|
n += s
|
||
|
case *Job_SparkJob:
|
||
|
s := proto.Size(x.SparkJob)
|
||
|
n += 1 // tag and wire
|
||
|
n += proto.SizeVarint(uint64(s))
|
||
|
n += s
|
||
|
case *Job_PysparkJob:
|
||
|
s := proto.Size(x.PysparkJob)
|
||
|
n += 1 // tag and wire
|
||
|
n += proto.SizeVarint(uint64(s))
|
||
|
n += s
|
||
|
case *Job_HiveJob:
|
||
|
s := proto.Size(x.HiveJob)
|
||
|
n += 1 // tag and wire
|
||
|
n += proto.SizeVarint(uint64(s))
|
||
|
n += s
|
||
|
case *Job_PigJob:
|
||
|
s := proto.Size(x.PigJob)
|
||
|
n += 1 // tag and wire
|
||
|
n += proto.SizeVarint(uint64(s))
|
||
|
n += s
|
||
|
case *Job_SparkSqlJob:
|
||
|
s := proto.Size(x.SparkSqlJob)
|
||
|
n += 1 // tag and wire
|
||
|
n += proto.SizeVarint(uint64(s))
|
||
|
n += s
|
||
|
case nil:
|
||
|
default:
|
||
|
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
|
||
|
}
|
||
|
return n
|
||
|
}
|
||
|
|
||
|
// Job scheduling options.
|
||
|
//
|
||
|
// **Beta Feature**: These options are available for testing purposes only.
|
||
|
// They may be changed before final release.
|
||
|
type JobScheduling struct {
|
||
|
// Optional. Maximum number of times per hour a driver may be restarted as
|
||
|
// a result of driver terminating with non-zero code before job is
|
||
|
// reported failed.
|
||
|
//
|
||
|
// A job may be reported as thrashing if driver exits with non-zero code
|
||
|
// 4 times within 10 minute window.
|
||
|
//
|
||
|
// Maximum value is 10.
|
||
|
MaxFailuresPerHour int32 `protobuf:"varint,1,opt,name=max_failures_per_hour,json=maxFailuresPerHour,proto3" json:"max_failures_per_hour,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *JobScheduling) Reset() { *m = JobScheduling{} }
|
||
|
func (m *JobScheduling) String() string { return proto.CompactTextString(m) }
|
||
|
func (*JobScheduling) ProtoMessage() {}
|
||
|
func (*JobScheduling) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{13}
|
||
|
}
|
||
|
func (m *JobScheduling) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_JobScheduling.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *JobScheduling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_JobScheduling.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *JobScheduling) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_JobScheduling.Merge(dst, src)
|
||
|
}
|
||
|
func (m *JobScheduling) XXX_Size() int {
|
||
|
return xxx_messageInfo_JobScheduling.Size(m)
|
||
|
}
|
||
|
func (m *JobScheduling) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_JobScheduling.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_JobScheduling proto.InternalMessageInfo
|
||
|
|
||
|
func (m *JobScheduling) GetMaxFailuresPerHour() int32 {
|
||
|
if m != nil {
|
||
|
return m.MaxFailuresPerHour
|
||
|
}
|
||
|
return 0
|
||
|
}
|
||
|
|
||
|
// A request to submit a job.
|
||
|
type SubmitJobRequest struct {
|
||
|
// Required. The ID of the Google Cloud Platform project that the job
|
||
|
// belongs to.
|
||
|
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||
|
// Required. The Cloud Dataproc region in which to handle the request.
|
||
|
Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
|
||
|
// Required. The job resource.
|
||
|
Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *SubmitJobRequest) Reset() { *m = SubmitJobRequest{} }
|
||
|
func (m *SubmitJobRequest) String() string { return proto.CompactTextString(m) }
|
||
|
func (*SubmitJobRequest) ProtoMessage() {}
|
||
|
func (*SubmitJobRequest) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{14}
|
||
|
}
|
||
|
func (m *SubmitJobRequest) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_SubmitJobRequest.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *SubmitJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_SubmitJobRequest.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *SubmitJobRequest) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_SubmitJobRequest.Merge(dst, src)
|
||
|
}
|
||
|
func (m *SubmitJobRequest) XXX_Size() int {
|
||
|
return xxx_messageInfo_SubmitJobRequest.Size(m)
|
||
|
}
|
||
|
func (m *SubmitJobRequest) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_SubmitJobRequest.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_SubmitJobRequest proto.InternalMessageInfo
|
||
|
|
||
|
func (m *SubmitJobRequest) GetProjectId() string {
|
||
|
if m != nil {
|
||
|
return m.ProjectId
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *SubmitJobRequest) GetRegion() string {
|
||
|
if m != nil {
|
||
|
return m.Region
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *SubmitJobRequest) GetJob() *Job {
|
||
|
if m != nil {
|
||
|
return m.Job
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
// A request to get the resource representation for a job in a project.
|
||
|
type GetJobRequest struct {
|
||
|
// Required. The ID of the Google Cloud Platform project that the job
|
||
|
// belongs to.
|
||
|
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||
|
// Required. The Cloud Dataproc region in which to handle the request.
|
||
|
Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
|
||
|
// Required. The job ID.
|
||
|
JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *GetJobRequest) Reset() { *m = GetJobRequest{} }
|
||
|
func (m *GetJobRequest) String() string { return proto.CompactTextString(m) }
|
||
|
func (*GetJobRequest) ProtoMessage() {}
|
||
|
func (*GetJobRequest) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{15}
|
||
|
}
|
||
|
func (m *GetJobRequest) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_GetJobRequest.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *GetJobRequest) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_GetJobRequest.Merge(dst, src)
|
||
|
}
|
||
|
func (m *GetJobRequest) XXX_Size() int {
|
||
|
return xxx_messageInfo_GetJobRequest.Size(m)
|
||
|
}
|
||
|
func (m *GetJobRequest) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_GetJobRequest.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo
|
||
|
|
||
|
func (m *GetJobRequest) GetProjectId() string {
|
||
|
if m != nil {
|
||
|
return m.ProjectId
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *GetJobRequest) GetRegion() string {
|
||
|
if m != nil {
|
||
|
return m.Region
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *GetJobRequest) GetJobId() string {
|
||
|
if m != nil {
|
||
|
return m.JobId
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
// A request to list jobs in a project.
|
||
|
type ListJobsRequest struct {
|
||
|
// Required. The ID of the Google Cloud Platform project that the job
|
||
|
// belongs to.
|
||
|
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||
|
// Required. The Cloud Dataproc region in which to handle the request.
|
||
|
Region string `protobuf:"bytes,6,opt,name=region,proto3" json:"region,omitempty"`
|
||
|
// Optional. The number of results to return in each response.
|
||
|
PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
|
||
|
// Optional. The page token, returned by a previous call, to request the
|
||
|
// next page of results.
|
||
|
PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
|
||
|
// Optional. If set, the returned jobs list includes only jobs that were
|
||
|
// submitted to the named cluster.
|
||
|
ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
|
||
|
// Optional. Specifies enumerated categories of jobs to list.
|
||
|
// (default = match ALL jobs).
|
||
|
//
|
||
|
// If `filter` is provided, `jobStateMatcher` will be ignored.
|
||
|
JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,proto3,enum=google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"`
|
||
|
// Optional. A filter constraining the jobs to list. Filters are
|
||
|
// case-sensitive and have the following syntax:
|
||
|
//
|
||
|
// [field = value] AND [field [= value]] ...
|
||
|
//
|
||
|
// where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label
|
||
|
// key. **value** can be `*` to match all values.
|
||
|
// `status.state` can be either `ACTIVE` or `NON_ACTIVE`.
|
||
|
// Only the logical `AND` operator is supported; space-separated items are
|
||
|
// treated as having an implicit `AND` operator.
|
||
|
//
|
||
|
// Example filter:
|
||
|
//
|
||
|
// status.state = ACTIVE AND labels.env = staging AND labels.starred = *
|
||
|
Filter string `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} }
|
||
|
func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) }
|
||
|
func (*ListJobsRequest) ProtoMessage() {}
|
||
|
func (*ListJobsRequest) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{16}
|
||
|
}
|
||
|
func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *ListJobsRequest) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_ListJobsRequest.Merge(dst, src)
|
||
|
}
|
||
|
func (m *ListJobsRequest) XXX_Size() int {
|
||
|
return xxx_messageInfo_ListJobsRequest.Size(m)
|
||
|
}
|
||
|
func (m *ListJobsRequest) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_ListJobsRequest.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo
|
||
|
|
||
|
func (m *ListJobsRequest) GetProjectId() string {
|
||
|
if m != nil {
|
||
|
return m.ProjectId
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) GetRegion() string {
|
||
|
if m != nil {
|
||
|
return m.Region
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) GetPageSize() int32 {
|
||
|
if m != nil {
|
||
|
return m.PageSize
|
||
|
}
|
||
|
return 0
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) GetPageToken() string {
|
||
|
if m != nil {
|
||
|
return m.PageToken
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) GetClusterName() string {
|
||
|
if m != nil {
|
||
|
return m.ClusterName
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher {
|
||
|
if m != nil {
|
||
|
return m.JobStateMatcher
|
||
|
}
|
||
|
return ListJobsRequest_ALL
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsRequest) GetFilter() string {
|
||
|
if m != nil {
|
||
|
return m.Filter
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
// A request to update a job.
|
||
|
type UpdateJobRequest struct {
|
||
|
// Required. The ID of the Google Cloud Platform project that the job
|
||
|
// belongs to.
|
||
|
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||
|
// Required. The Cloud Dataproc region in which to handle the request.
|
||
|
Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"`
|
||
|
// Required. The job ID.
|
||
|
JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
|
||
|
// Required. The changes to the job.
|
||
|
Job *Job `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"`
|
||
|
// Required. Specifies the path, relative to <code>Job</code>, of
|
||
|
// the field to update. For example, to update the labels of a Job the
|
||
|
// <code>update_mask</code> parameter would be specified as
|
||
|
// <code>labels</code>, and the `PATCH` request body would specify the new
|
||
|
// value. <strong>Note:</strong> Currently, <code>labels</code> is the only
|
||
|
// field that can be updated.
|
||
|
UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} }
|
||
|
func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) }
|
||
|
func (*UpdateJobRequest) ProtoMessage() {}
|
||
|
func (*UpdateJobRequest) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{17}
|
||
|
}
|
||
|
func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_UpdateJobRequest.Merge(dst, src)
|
||
|
}
|
||
|
func (m *UpdateJobRequest) XXX_Size() int {
|
||
|
return xxx_messageInfo_UpdateJobRequest.Size(m)
|
||
|
}
|
||
|
func (m *UpdateJobRequest) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo
|
||
|
|
||
|
func (m *UpdateJobRequest) GetProjectId() string {
|
||
|
if m != nil {
|
||
|
return m.ProjectId
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *UpdateJobRequest) GetRegion() string {
|
||
|
if m != nil {
|
||
|
return m.Region
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *UpdateJobRequest) GetJobId() string {
|
||
|
if m != nil {
|
||
|
return m.JobId
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
func (m *UpdateJobRequest) GetJob() *Job {
|
||
|
if m != nil {
|
||
|
return m.Job
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask {
|
||
|
if m != nil {
|
||
|
return m.UpdateMask
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
// A list of jobs in a project.
|
||
|
type ListJobsResponse struct {
|
||
|
// Output-only. Jobs list.
|
||
|
Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"`
|
||
|
// Optional. This token is included in the response if there are more results
|
||
|
// to fetch. To fetch additional results, provide this value as the
|
||
|
// `page_token` in a subsequent <code>ListJobsRequest</code>.
|
||
|
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
|
||
|
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||
|
XXX_unrecognized []byte `json:"-"`
|
||
|
XXX_sizecache int32 `json:"-"`
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} }
|
||
|
func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) }
|
||
|
func (*ListJobsResponse) ProtoMessage() {}
|
||
|
func (*ListJobsResponse) Descriptor() ([]byte, []int) {
|
||
|
return fileDescriptor_jobs_a8158969d454e928, []int{18}
|
||
|
}
|
||
|
func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error {
|
||
|
return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b)
|
||
|
}
|
||
|
func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||
|
return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic)
|
||
|
}
|
||
|
func (dst *ListJobsResponse) XXX_Merge(src proto.Message) {
|
||
|
xxx_messageInfo_ListJobsResponse.Merge(dst, src)
|
||
|
}
|
||
|
func (m *ListJobsResponse) XXX_Size() int {
|
||
|
return xxx_messageInfo_ListJobsResponse.Size(m)
|
||
|
}
|
||
|
func (m *ListJobsResponse) XXX_DiscardUnknown() {
|
||
|
xxx_messageInfo_ListJobsResponse.DiscardUnknown(m)
|
||
|
}
|
||
|
|
||
|
var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo
|
||
|
|
||
|
func (m *ListJobsResponse) GetJobs() []*Job {
|
||
|
if m != nil {
|
||
|
return m.Jobs
|
||
|
}
|
||
|
return nil
|
||
|
}
|
||
|
|
||
|
func (m *ListJobsResponse) GetNextPageToken() string {
|
||
|
if m != nil {
|
||
|
return m.NextPageToken
|
||
|
}
|
||
|
return ""
|
||
|
}
|
||
|
|
||
|
// A request to cancel a job.
// NOTE(review): generated by protoc-gen-go; do not hand-edit — regenerate
// from the .proto source instead. Field tags encode wire number/type
// (note Region is field 3 and JobId is field 2, matching the proto).
type CancelJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// Internal bookkeeping fields used by the proto runtime; excluded from JSON.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *CancelJobRequest) Reset()         { *m = CancelJobRequest{} }
func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) }
func (*CancelJobRequest) ProtoMessage()    {}
func (*CancelJobRequest) Descriptor() ([]byte, []int) {
	// Index 19 into the message list of the compiled file descriptor.
	return fileDescriptor_jobs_a8158969d454e928, []int{19}
}
func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b)
}
func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic)
}
func (dst *CancelJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_CancelJobRequest.Merge(dst, src)
}
func (m *CancelJobRequest) XXX_Size() int {
	return xxx_messageInfo_CancelJobRequest.Size(m)
}
func (m *CancelJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_CancelJobRequest.DiscardUnknown(m)
}

var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo

// GetProjectId returns the ProjectId field; nil-receiver safe (returns "").
func (m *CancelJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

// GetRegion returns the Region field; nil-receiver safe (returns "").
func (m *CancelJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

// GetJobId returns the JobId field; nil-receiver safe (returns "").
func (m *CancelJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
|
||
|
|
||
|
// A request to delete a job.
// NOTE(review): generated by protoc-gen-go; do not hand-edit — regenerate
// from the .proto source instead. Layout mirrors CancelJobRequest
// (Region is field 3, JobId is field 2, per the proto definition).
type DeleteJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// Internal bookkeeping fields used by the proto runtime; excluded from JSON.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *DeleteJobRequest) Reset()         { *m = DeleteJobRequest{} }
func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) }
func (*DeleteJobRequest) ProtoMessage()    {}
func (*DeleteJobRequest) Descriptor() ([]byte, []int) {
	// Index 20 into the message list of the compiled file descriptor.
	return fileDescriptor_jobs_a8158969d454e928, []int{20}
}
func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b)
}
func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic)
}
func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DeleteJobRequest.Merge(dst, src)
}
func (m *DeleteJobRequest) XXX_Size() int {
	return xxx_messageInfo_DeleteJobRequest.Size(m)
}
func (m *DeleteJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m)
}

var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo

// GetProjectId returns the ProjectId field; nil-receiver safe (returns "").
func (m *DeleteJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

// GetRegion returns the Region field; nil-receiver safe (returns "").
func (m *DeleteJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

// GetJobId returns the JobId field; nil-receiver safe (returns "").
func (m *DeleteJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
|
||
|
|
||
|
// init registers every message, map-entry, and enum type of this generated
// file with the global proto registry under its fully-qualified name, so the
// runtime can resolve them by name (e.g. for Any unpacking and reflection).
// NOTE(review): generated by protoc-gen-go; do not hand-edit.
func init() {
	proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1.LoggingConfig")
	proto.RegisterMapType((map[string]LoggingConfig_Level)(nil), "google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry")
	proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1.HadoopJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HadoopJob.PropertiesEntry")
	proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1.SparkJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkJob.PropertiesEntry")
	proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1.PySparkJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PySparkJob.PropertiesEntry")
	proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1.QueryList")
	proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1.HiveJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.PropertiesEntry")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry")
	proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1.SparkSqlJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry")
	proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1.PigJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.PropertiesEntry")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry")
	proto.RegisterType((*JobPlacement)(nil), "google.cloud.dataproc.v1.JobPlacement")
	proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1.JobStatus")
	proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1.JobReference")
	proto.RegisterType((*YarnApplication)(nil), "google.cloud.dataproc.v1.YarnApplication")
	proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1.Job")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.Job.LabelsEntry")
	proto.RegisterType((*JobScheduling)(nil), "google.cloud.dataproc.v1.JobScheduling")
	proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1.SubmitJobRequest")
	proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1.GetJobRequest")
	proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1.ListJobsRequest")
	proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.dataproc.v1.UpdateJobRequest")
	proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1.ListJobsResponse")
	proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1.CancelJobRequest")
	proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1.DeleteJobRequest")
	proto.RegisterEnum("google.cloud.dataproc.v1.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_State", JobStatus_State_name, JobStatus_State_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_Substate", JobStatus_Substate_name, JobStatus_Substate_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.YarnApplication_State", YarnApplication_State_name, YarnApplication_State_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value)
}
|
||
|
|
||
|
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
|
||
|
// JobControllerClient is the client API for JobController service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
// NOTE(review): generated by protoc-gen-go-grpc boilerplate in this file;
// do not hand-edit — regenerate from the .proto service definition.
type JobControllerClient interface {
	// Submits a job to a cluster.
	SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Gets the resource representation for a job in a project.
	GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Lists regions/{region}/jobs in a project.
	ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error)
	// Updates a job in a project.
	UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Starts a job cancellation request. To access the job resource
	// after cancellation, call
	// [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) or
	// [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
	CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Deletes the job from the project. If the job is active, the delete fails,
	// and the response returns `FAILED_PRECONDITION`.
	DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error)
}
|
||
|
|
||
|
// jobControllerClient is the concrete JobControllerClient backed by a
// grpc.ClientConn; each method issues a unary RPC over cc.
type jobControllerClient struct {
	cc *grpc.ClientConn
}

// NewJobControllerClient returns a JobControllerClient that sends RPCs over cc.
func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient {
	return &jobControllerClient{cc}
}
|
||
|
|
||
|
// Each method below performs a unary invoke against the corresponding
// JobController RPC and decodes the response into a freshly allocated
// message. NOTE(review): generated boilerplate; do not hand-edit.
func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/SubmitJob", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/GetJob", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) {
	out := new(ListJobsResponse)
	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/ListJobs", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/UpdateJob", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/CancelJob", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
	out := new(empty.Empty)
	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/DeleteJob", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
|
||
|
|
||
|
// JobControllerServer is the server API for JobController service.
// Implementations are registered with RegisterJobControllerServer.
// NOTE(review): generated boilerplate; do not hand-edit.
type JobControllerServer interface {
	// Submits a job to a cluster.
	SubmitJob(context.Context, *SubmitJobRequest) (*Job, error)
	// Gets the resource representation for a job in a project.
	GetJob(context.Context, *GetJobRequest) (*Job, error)
	// Lists regions/{region}/jobs in a project.
	ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error)
	// Updates a job in a project.
	UpdateJob(context.Context, *UpdateJobRequest) (*Job, error)
	// Starts a job cancellation request. To access the job resource
	// after cancellation, call
	// [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) or
	// [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
	CancelJob(context.Context, *CancelJobRequest) (*Job, error)
	// Deletes the job from the project. If the job is active, the delete fails,
	// and the response returns `FAILED_PRECONDITION`.
	DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error)
}
|
||
|
|
||
|
// RegisterJobControllerServer registers srv's handlers with the gRPC server s
// using the service descriptor defined below.
func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) {
	s.RegisterService(&_JobController_serviceDesc, srv)
}
|
||
|
|
||
|
// The _JobController_*_Handler functions below adapt each JobControllerServer
// method to grpc's generic unary handler shape: decode the request, then
// either call the server directly or route through the configured
// interceptor. NOTE(review): generated boilerplate; do not hand-edit.
func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(SubmitJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).SubmitJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/SubmitJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).GetJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/GetJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListJobsRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).ListJobs(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/ListJobs",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(UpdateJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).UpdateJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/UpdateJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).UpdateJob(ctx, req.(*UpdateJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(CancelJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).CancelJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/CancelJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(DeleteJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).DeleteJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/DeleteJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}
|
||
|
|
||
|
// _JobController_serviceDesc wires each unary method name to its handler for
// grpc.Server registration; this service defines no streaming methods.
var _JobController_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.cloud.dataproc.v1.JobController",
	HandlerType: (*JobControllerServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "SubmitJob",
			Handler:    _JobController_SubmitJob_Handler,
		},
		{
			MethodName: "GetJob",
			Handler:    _JobController_GetJob_Handler,
		},
		{
			MethodName: "ListJobs",
			Handler:    _JobController_ListJobs_Handler,
		},
		{
			MethodName: "UpdateJob",
			Handler:    _JobController_UpdateJob_Handler,
		},
		{
			MethodName: "CancelJob",
			Handler:    _JobController_CancelJob_Handler,
		},
		{
			MethodName: "DeleteJob",
			Handler:    _JobController_DeleteJob_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "google/cloud/dataproc/v1/jobs.proto",
}
|
||
|
|
||
|
// init registers the compiled (gzipped) file descriptor under the proto file
// path so it can be looked up by name at runtime.
func init() {
	proto.RegisterFile("google/cloud/dataproc/v1/jobs.proto", fileDescriptor_jobs_a8158969d454e928)
}
|
||
|
|
||
|
// fileDescriptor_jobs_a8158969d454e928 holds the gzip-compressed
// FileDescriptorProto for google/cloud/dataproc/v1/jobs.proto, registered in
// init above. The data is emitted by protoc-gen-go and must not be edited.
var fileDescriptor_jobs_a8158969d454e928 = []byte{
	// 2290 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x59, 0xcf, 0x73, 0x1b, 0x49,
	0xf5, 0xb7, 0x7e, 0x6b, 0x9e, 0x6c, 0x79, 0xdc, 0x9b, 0xec, 0x57, 0xa5, 0xdd, 0xad, 0xf5, 0x4e,
	0xbe, 0x1b, 0x9c, 0x00, 0x12, 0xd6, 0x42, 0x36, 0x6b, 0x03, 0x59, 0x59, 0x1a, 0x47, 0xf2, 0x2a,
	0xb2, 0x32, 0x92, 0x92, 0x82, 0x2a, 0x6a, 0x32, 0x92, 0xda, 0xf2, 0xd8, 0xa3, 0x99, 0xf1, 0xf4,
	0x8c, 0x2b, 0x4a, 0x2a, 0x17, 0x2e, 0x1c, 0x29, 0xe0, 0x04, 0x55, 0x5c, 0xb8, 0xf1, 0x07, 0xc0,
	0x85, 0xa2, 0xb8, 0x70, 0xe6, 0xc2, 0x81, 0x0b, 0xb5, 0x27, 0x8e, 0xfc, 0x11, 0x54, 0x77, 0xcf,
	0xc8, 0x92, 0x6c, 0xfd, 0x70, 0x02, 0x5b, 0xbb, 0x7b, 0x72, 0x4f, 0xbf, 0x1f, 0xfd, 0xba, 0x3f,
	0x9f, 0x7e, 0xef, 0xb5, 0x0c, 0xb7, 0xfa, 0x96, 0xd5, 0x37, 0x70, 0xbe, 0x6b, 0x58, 0x5e, 0x2f,
	0xdf, 0xd3, 0x5c, 0xcd, 0x76, 0xac, 0x6e, 0xfe, 0x7c, 0x3b, 0x7f, 0x62, 0x75, 0x48, 0xce, 0x76,
	0x2c, 0xd7, 0x42, 0x19, 0xae, 0x94, 0x63, 0x4a, 0xb9, 0x40, 0x29, 0x77, 0xbe, 0x9d, 0x7d, 0xd7,
	0x37, 0xd7, 0x6c, 0x3d, 0xaf, 0x99, 0xa6, 0xe5, 0x6a, 0xae, 0x6e, 0x99, 0xbe, 0x5d, 0xf6, 0x1d,
	0x5f, 0xca, 0xbe, 0x3a, 0xde, 0x51, 0x1e, 0x0f, 0x6c, 0x77, 0xe8, 0x0b, 0x37, 0xa7, 0x85, 0x47,
	0x3a, 0x36, 0x7a, 0xea, 0x40, 0x23, 0xa7, 0xbe, 0xc6, 0xfb, 0xd3, 0x1a, 0xae, 0x3e, 0xc0, 0xc4,
	0xd5, 0x06, 0x36, 0x57, 0x90, 0x3e, 0x0f, 0xc3, 0x5a, 0xcd, 0xea, 0xf7, 0x75, 0xb3, 0x5f, 0xb2,
	0xcc, 0x23, 0xbd, 0x8f, 0x8e, 0x61, 0xa3, 0xe7, 0xe8, 0xe7, 0xd8, 0x51, 0x0d, 0xab, 0xaf, 0x1a,
	0xf8, 0x1c, 0x1b, 0x24, 0x13, 0xde, 0x8c, 0x6c, 0xa5, 0x0a, 0xdf, 0xcf, 0xcd, 0xda, 0x45, 0x6e,
	0xc2, 0x47, 0xae, 0xcc, 0x1c, 0xd4, 0xac, 0x7e, 0x8d, 0x99, 0xcb, 0xa6, 0xeb, 0x0c, 0x95, 0xf5,
	0xde, 0xe4, 0x6c, 0xf6, 0x0c, 0x6e, 0x5c, 0xa5, 0x88, 0x44, 0x88, 0x9c, 0xe2, 0x61, 0x26, 0xb4,
	0x19, 0xda, 0x12, 0x14, 0x3a, 0x44, 0x25, 0x88, 0x9d, 0x6b, 0x86, 0x87, 0x33, 0xe1, 0xcd, 0xd0,
	0x56, 0xba, 0xf0, 0xed, 0x65, 0xe3, 0x60, 0x5e, 0x15, 0x6e, 0xbb, 0x13, 0xbe, 0x1f, 0x92, 0x6c,
	0x88, 0xb1, 0x39, 0x74, 0x13, 0x36, 0x6a, 0xf2, 0x13, 0xb9, 0xa6, 0xb6, 0xeb, 0xcd, 0x86, 0x5c,
	0xaa, 0xee, 0x57, 0xe5, 0xb2, 0xb8, 0x82, 0x12, 0x10, 0x29, 0xd6, 0x6a, 0x62, 0x08, 0x09, 0x10,
	0x6b, 0x29, 0xc5, 0x92, 0x2c, 0x86, 0xe9, 0xb0, 0x2c, 0xef, 0xb5, 0x1f, 0x8a, 0x11, 0x94, 0x84,
	0x68, 0xb5, 0xbe, 0x7f, 0x28, 0x46, 0xe9, 0xe8, 0x69, 0x51, 0xa9, 0x8b, 0x31, 0x2a, 0x96, 0x15,
	0xe5, 0x50, 0x11, 0xe3, 0x74, 0xb8, 0x5f, 0x6c, 0x15, 0x6b, 0x62, 0x82, 0x3a, 0x3a, 0xdc, 0xdf,
	0x17, 0x93, 0xd2, 0x5f, 0x22, 0x20, 0x54, 0xb4, 0x9e, 0x65, 0xd9, 0x07, 0x56, 0x07, 0x7d, 0x13,
	0x36, 0x06, 0x9a, 0x6e, 0xaa, 0x27, 0x9a, 0xa3, 0x1e, 0xe9, 0x06, 0x56, 0x3d, 0x47, 0xe7, 0x1b,
	0xad, 0xac, 0x28, 0x69, 0x2a, 0x3a, 0xd0, 0x9c, 0x7d, 0xdd, 0xc0, 0x6d, 0x47, 0x47, 0xef, 0x03,
	0x30, 0xe5, 0xae, 0xa1, 0x11, 0xc2, 0xb6, 0x4e, 0xb5, 0x04, 0x3a, 0x57, 0xa2, 0x53, 0x08, 0x41,
	0x54, 0x73, 0xfa, 0x24, 0x13, 0xd9, 0x8c, 0x6c, 0x09, 0x0a, 0x1b, 0x23, 0x09, 0xd6, 0xc6, 0x9d,
	0x93, 0x4c, 0x94, 0x09, 0x53, 0x27, 0x23, 0xbf, 0x04, 0xbd, 0x03, 0xc2, 0x85, 0x3c, 0xc6, 0xe4,
	0xc9, 0xa3, 0x40, 0xf8, 0x01, 0xac, 0x6a, 0x4e, 0xf7, 0x58, 0x3f, 0xf7, 0xe5, 0x71, 0x6e, 0xef,
	0xcf, 0x31, 0x95, 0x26, 0x80, 0xed, 0x58, 0x36, 0x76, 0x5c, 0x1d, 0x93, 0x4c, 0x82, 0x71, 0xe3,
	0xa3, 0xd9, 0x98, 0x8c, 0xb6, 0x9f, 0x6b, 0x8c, 0xac, 0x38, 0x25, 0xc6, 0xdc, 0xa0, 0x3a, 0xa4,
	0x0d, 0x0e, 0x9e, 0xda, 0x65, 0xe8, 0x65, 0x92, 0x9b, 0xa1, 0xad, 0x54, 0xe1, 0x1b, 0x4b, 0x82,
	0xad, 0xac, 0x19, 0xe3, 0x9f, 0xd9, 0x1f, 0xc0, 0xfa, 0xd4, 0x72, 0x57, 0x10, 0xeb, 0xc6, 0x38,
	0xb1, 0x84, 0x31, 0xa6, 0xec, 0x25, 0x21, 0xce, 0xf9, 0x2a, 0xfd, 0x39, 0x02, 0xc9, 0xa6, 0xad,
	0x39, 0xa7, 0x5f, 0x1f, 0x00, 0x95, 0x2b, 0x00, 0x2c, 0xcc, 0x3e, 0xe7, 0x60, 0xf7, 0x5f, 0x4d,
	0xfc, 0xfe, 0x1a, 0x01, 0x68, 0x0c, 0x47, 0x08, 0xe6, 0xe1, 0x06, 0x03, 0xc5, 0x1e, 0xba, 0xc7,
	0x96, 0x39, 0x05, 0xa2, 0xc2, 0xd0, 0x6d, 0x30, 0x51, 0x80, 0x62, 0x00, 0x52, 0x78, 0x0c, 0xa4,
	0x2d, 0x10, 0xa7, 0xec, 0x03, 0x10, 0xd3, 0xf6, 0xb8, 0xf1, 0x17, 0x03, 0x67, 0xeb, 0x0a, 0x38,
	0xbf, 0x3b, 0xfb, 0xd8, 0x2f, 0x0e, 0xe3, 0x2b, 0x04, 0xa8, 0xf4, 0x21, 0x08, 0x8f, 0x3d, 0xec,
	0x0c, 0x6b, 0x3a, 0x71, 0x51, 0x06, 0x12, 0x67, 0x1e, 0x76, 0xe8, 0x76, 0x43, 0xec, 0x3c, 0x82,
	0x4f, 0xe9, 0xe7, 0x51, 0x48, 0x54, 0xf4, 0x73, 0x4c, 0xa1, 0xbe, 0x0d, 0x69, 0x3a, 0x3d, 0xbc,
	0x7c, 0x53, 0x57, 0xd9, 0x7c, 0x80, 0x70, 0x19, 0x80, 0xeb, 0x19, 0x3a, 0x71, 0xd9, 0xca, 0xa9,
	0xc2, 0xad, 0xd9, 0xbb, 0x1c, 0x85, 0x41, 0x2f, 0xf3, 0xd9, 0x28, 0xa6, 0x1c, 0xbc, 0xd5, 0xb5,
	0x4c, 0x57, 0x37, 0x3d, 0xac, 0x52, 0x62, 0x68, 0xba, 0xe1, 0x39, 0x38, 0x13, 0xd9, 0x0c, 0x6d,
	0x25, 0x95, 0x8d, 0x40, 0x74, 0x68, 0xee, 0x73, 0x01, 0xd2, 0x40, 0x24, 0x5d, 0x47, 0xb7, 0x5d,
	0xf5, 0x5c, 0x73, 0x74, 0xad, 0x63, 0x60, 0x4e, 0x8e, 0x54, 0xe1, 0xde, 0x9c, 0x5c, 0xca, 0xb7,
	0x96, 0x6b, 0x32, 0xcb, 0x27, 0x81, 0xa1, 0x5f, 0x61, 0xc9, 0xe4, 0x2c, 0x7a, 0x3c, 0x41, 0x8c,
	0x18, 0x73, 0xbe, 0xbd, 0xd8, 0xf9, 0x3c, 0x56, 0x5c, 0xe2, 0x73, 0xfc, 0x12, 0x9f, 0xb3, 0x7b,
	0x70, 0xe3, 0xaa, 0xf8, 0xae, 0x03, 0xf7, 0x9b, 0x5e, 0x7f, 0x61, 0x44, 0x10, 0xe9, 0x4f, 0x51,
	0x48, 0x31, 0xc2, 0x37, 0xcf, 0x8c, 0x2f, 0x9e, 0x15, 0xf8, 0x0a, 0x94, 0x23, 0x0c, 0x88, 0x9d,
	0x05, 0x09, 0x97, 0x87, 0xbb, 0x24, 0xd2, 0xed, 0x09, 0xa4, 0x39, 0x8d, 0xbe, 0xb7, 0xdc, 0x02,
	0xd7, 0x42, 0xfb, 0xfe, 0xe5, 0xec, 0x75, 0x39, 0x4f, 0xc4, 0xdf, 0x28, 0x4f, 0x7c, 0xb9, 0xd8,
	0xf3, 0xcf, 0x28, 0xc4, 0x1b, 0x7a, 0xff, 0xcb, 0x9f, 0x4e, 0x9e, 0xcd, 0x4c, 0x27, 0x73, 0x78,
	0xc0, 0x77, 0xb6, 0x24, 0xc7, 0x1a, 0x57, 0x64, 0x93, 0xef, 0x2c, 0xf4, 0xfd, 0x86, 0xc9, 0xe4,
	0x0a, 0x7a, 0x25, 0xbe, 0x46, 0xf4, 0x6a, 0xc1, 0xea, 0x81, 0xd5, 0x69, 0x18, 0x5a, 0x17, 0x0f,
	0xb0, 0xe9, 0xd2, 0x6a, 0xdf, 0x35, 0x3c, 0xe2, 0x62, 0x47, 0x35, 0xb5, 0x01, 0xf6, 0xfd, 0xa5,
	0xfc, 0xb9, 0xba, 0x36, 0xc0, 0xe3, 0x2a, 0x9e, 0xa7, 0xf7, 0x7c, 0xf7, 0x81, 0x4a, 0xdb, 0xd3,
	0x7b, 0xd2, 0xbf, 0x23, 0x20, 0x1c, 0x58, 0x9d, 0xa6, 0xab, 0xb9, 0x1e, 0x41, 0x0f, 0x20, 0x46,
	0x5c, 0xcd, 0xe5, 0xce, 0xd2, 0x85, 0x3b, 0xb3, 0x0f, 0x6e, 0x64, 0x93, 0xa3, 0x7f, 0xb0, 0xc2,
	0xed, 0x68, 0xb5, 0xed, 0x61, 0x57, 0xd3, 0x0d, 0xbf, 0x89, 0x55, 0x82, 0x4f, 0x54, 0x06, 0x91,
	0xa9, 0xa8, 0xc4, 0xd5, 0x1c, 0x57, 0xa5, 0xaf, 0x4b, 0xff, 0xf6, 0x67, 0x83, 0x55, 0x82, 0xa7,
	0x67, 0xae, 0x15, 0x3c, 0x3d, 0x95, 0x34, 0xb3, 0x69, 0x52, 0x13, 0x3a, 0x89, 0x2a, 0x90, 0x24,
	0x5e, 0x87, 0xc7, 0x98, 0x60, 0x31, 0x7e, 0x6b, 0xa9, 0x18, 0x7d, 0x1b, 0x65, 0x64, 0x2d, 0xfd,
	0x3e, 0x04, 0x31, 0x16, 0x3a, 0x7d, 0xe0, 0x35, 0x5b, 0xc5, 0x96, 0x3c, 0xf5, 0xc0, 0x4b, 0x41,
	0xa2, 0x21, 0xd7, 0xcb, 0xd5, 0xfa, 0x43, 0x31, 0x84, 0xd2, 0x00, 0x4d, 0xb9, 0xd5, 0x6e, 0xa8,
	0xe5, 0xc3, 0xba, 0x2c, 0x26, 0xa9, 0x50, 0x69, 0xd7, 0xeb, 0x54, 0x18, 0x46, 0x08, 0xd2, 0xa5,
	0x62, 0xbd, 0x24, 0xd7, 0xd4, 0xc0, 0x20, 0x32, 0x36, 0xd7, 0x6c, 0x15, 0x95, 0x96, 0x5c, 0x16,
	0x13, 0x68, 0x0d, 0x04, 0x3e, 0x57, 0x93, 0xcb, 0xfc, 0x61, 0xc8, 0xbc, 0x4d, 0x3c, 0x0c, 0xdf,
	0x82, 0xf5, 0x62, 0xab, 0x25, 0x3f, 0x6a, 0xb4, 0xd4, 0xfd, 0x62, 0xb5, 0xd6, 0x56, 0x64, 0x51,
	0x90, 0x2a, 0x90, 0x0c, 0x76, 0x80, 0xd6, 0x21, 0x35, 0x19, 0xe7, 0x1a, 0x08, 0xcd, 0xf6, 0xde,
	0xa3, 0x6a, 0x8b, 0x2e, 0x12, 0x42, 0x00, 0xf1, 0xc7, 0x6d, 0xb9, 0x2d, 0x97, 0xc5, 0x30, 0x12,
	0x61, 0xb5, 0xd9, 0x2a, 0xd6, 0x64, 0x1a, 0x43, 0xab, 0xdd, 0x14, 0x23, 0x52, 0x99, 0x91, 0x48,
	0xc1, 0x47, 0xd8, 0xc1, 0x66, 0x17, 0xa3, 0xf7, 0xd8, 0x45, 0x3d, 0xc1, 0x5d, 0x57, 0xd5, 0x7b,
	0x3e, 0x85, 0x04, 0x7f, 0xa6, 0xda, 0x43, 0x37, 0x21, 0x7e, 0x62, 0x75, 0xd4, 0x11, 0x75, 0x62,
	0x27, 0x56, 0xa7, 0xda, 0x93, 0xfe, 0x10, 0x86, 0xf5, 0x1f, 0x69, 0x8e, 0x59, 0xb4, 0x6d, 0x43,
	0xef, 0xb2, 0x5f, 0x21, 0x68, 0xef, 0x3b, 0x46, 0x43, 0x36, 0x46, 0x72, 0x40, 0x27, 0xfe, 0x18,
	0xcf, 0xcf, 0x86, 0x6a, 0xca, 0xdb, 0x24, 0xa9, 0xb2, 0x90, 0xb4, 0x1d, 0xab, 0xef, 0x60, 0x42,
	0x58, 0x52, 0x0b, 0x2b, 0xa3, 0x6f, 0x4a, 0x71, 0xd7, 0xd1, 0xba, 0xa7, 0xf4, 0xd2, 0x7b, 0x8e,
	0x91, 0x89, 0x72, 0x8a, 0x07, 0x73, 0x6d, 0xc7, 0x90, 0x7e, 0xb6, 0x08, 0xe9, 0x04, 0x44, 0xea,
	0xf2, 0x53, 0x8e, 0x72, 0x5d, 0x7e, 0xaa, 0x36, 0x8b, 0x4f, 0x38, 0xb0, 0x13, 0x47, 0x1b, 0x41,
	0xab, 0x90, 0x2c, 0x96, 0x4a, 0x72, 0xa3, 0xc5, 0xe0, 0x1b, 0xa3, 0x40, 0x8c, 0x8a, 0xf6, 0xab,
	0xf5, 0x6a, 0xb3, 0x22, 0x97, 0xc5, 0x38, 0xc5, 0x80, 0x82, 0xc7, 0x40, 0x07, 0x88, 0x7f, 0x56,
	0x65, 0x88, 0x27, 0xa5, 0x7f, 0x24, 0x21, 0x42, 0xcb, 0x43, 0x19, 0x04, 0x27, 0x80, 0x80, 0x1d,
	0x58, 0xaa, 0x70, 0x7b, 0x2e, 0x8d, 0x47, 0x80, 0x29, 0x17, 0x86, 0xd4, 0x8b, 0x1d, 0x64, 0x03,
	0xbf, 0x76, 0xcc, 0xf7, 0x32, 0xca, 0x1d, 0xca, 0x85, 0x21, 0x2d, 0x41, 0xc7, 0xec, 0xd5, 0xad,
	0x9e, 0x58, 0x1d, 0x76, 0xbc, 0x73, 0x4b, 0xd0, 0xe8, 0x85, 0x4e, 0x4b, 0xd0, 0xf1, 0xe8, 0xd7,
	0x8a, 0x22, 0x08, 0x84, 0x36, 0x0a, 0xcc, 0x49, 0x94, 0x39, 0x91, 0x16, 0xbf, 0x12, 0x2b, 0x2b,
	0x4a, 0x92, 0x04, 0xaf, 0xad, 0x87, 0x90, 0xb2, 0x87, 0x17, 0x4e, 0x62, 0xcc, 0xc9, 0xff, 0x2f,
	0xf3, 0x36, 0xa9, 0xac, 0x28, 0xe0, 0x9b, 0x52, 0x47, 0x3f, 0x84, 0x24, 0x7b, 0x03, 0x51, 0x2f,
	0x3c, 0xc3, 0x7c, 0xb0, 0xb0, 0x91, 0xad, 0xac, 0x28, 0x89, 0x63, 0xff, 0x2d, 0xb0, 0x0b, 0x09,
	0x5b, 0xef, 0x33, 0x73, 0x5e, 0x3f, 0x36, 0x17, 0x55, 0xae, 0xca, 0x8a, 0x12, 0xb7, 0x79, 0xe5,
	0xff, 0x0c, 0xd6, 0xf8, 0x1e, 0xc8, 0x99, 0xc1, 0x5c, 0xac, 0x32, 0x17, 0x1f, 0x2e, 0xd5, 0x60,
	0x55, 0x56, 0x94, 0x14, 0x19, 0xeb, 0x3f, 0x77, 0x21, 0x4e, 0x58, 0x02, 0xf3, 0xdf, 0x53, 0xb7,
	0x96, 0xc8, 0x75, 0x8a, 0x6f, 0x82, 0x0e, 0x20, 0xcd, 0x47, 0xea, 0xb1, 0x4e, 0x5c, 0xcb, 0x19,
	0x66, 0xd6, 0x58, 0x1d, 0x5e, 0xca, 0xc9, 0x1a, 0x37, 0xad, 0x70, 0x4b, 0xf4, 0x04, 0x36, 0x86,
	0x9a, 0x63, 0xaa, 0xda, 0xc5, 0x15, 0x25, 0x19, 0x81, 0xb9, 0xbb, 0xb3, 0xf4, 0xa5, 0x56, 0xc4,
	0xe1, 0xe4, 0x04, 0x41, 0xbb, 0x90, 0xf5, 0x7f, 0x41, 0xb4, 0x3c, 0xd7, 0xf6, 0x5c, 0xd5, 0xc1,
	0xc4, 0xf2, 0x9c, 0x2e, 0xef, 0x99, 0x36, 0xd8, 0x5d, 0xfe, 0x3f, 0xae, 0x71, 0xc8, 0x14, 0x14,
	0x5f, 0x4e, 0x9b, 0xa7, 0x8f, 0x21, 0xe3, 0x1b, 0xd3, 0x16, 0xc7, 0xb1, 0x0c, 0xd6, 0x1d, 0x10,
	0x66, 0xba, 0xce, 0x4c, 0x6f, 0x72, 0x79, 0x89, 0x8b, 0x69, 0x9f, 0x40, 0xa8, 0x61, 0x11, 0xe2,
	0x86, 0xd6, 0xc1, 0x06, 0xc9, 0xa0, 0x45, 0x5b, 0xa0, 0x6d, 0x49, 0x8d, 0xe9, 0xf2, 0x96, 0xc4,
	0x37, 0x44, 0x0f, 0x01, 0x48, 0xf7, 0x18, 0xf7, 0x3c, 0x43, 0x37, 0xfb, 0x99, 0x1b, 0x8b, 0xda,
	0x0c, 0x7a, 0xb0, 0x23, 0x75, 0x65, 0xcc, 0x34, 0xfb, 0x09, 0xa4, 0xc6, 0xfc, 0x5f, 0xab, 0x37,
	0x00, 0x48, 0xba, 0x43, 0x9b, 0xf1, 0x5c, 0xda, 0x83, 0xb5, 0x89, 0x35, 0xd0, 0x36, 0xdc, 0x1c,
	0x68, 0xcf, 0x83, 0x5e, 0x90, 0xa8, 0x36, 0x76, 0xd4, 0x63, 0xcb, 0x73, 0x98, 0xeb, 0x98, 0x82,
	0x06, 0xda, 0x73, 0xbf, 0x1d, 0x24, 0x0d, 0xec, 0x54, 0x2c, 0xcf, 0x91, 0x5e, 0x80, 0xd8, 0xf4,
	0x3a, 0x03, 0xdd, 0x65, 0x09, 0xe7, 0xcc, 0xc3, 0xc4, 0x5d, 0x54, 0x1f, 0xde, 0x86, 0xb8, 0x83,
	0xfb, 0xba, 0x65, 0xb2, 0xc4, 0x21, 0x28, 0xfe, 0x17, 0xca, 0x43, 0x84, 0x72, 0x9f, 0x27, 0xa5,
	0xf7, 0xe6, 0xa7, 0x36, 0xaa, 0x29, 0xfd, 0x04, 0xd6, 0x1e, 0xe2, 0xff, 0xc2, 0xc2, 0x33, 0x0a,
	0xd6, 0xe7, 0x61, 0x58, 0xa7, 0xad, 0xf2, 0x81, 0xd5, 0x21, 0xd7, 0x5e, 0x21, 0x3e, 0xb1, 0xc2,
	0x3b, 0x20, 0xd8, 0x5a, 0x1f, 0xab, 0x44, 0x7f, 0xc1, 0x31, 0x89, 0x29, 0x49, 0x3a, 0xd1, 0xd4,
	0x5f, 0xf0, 0x72, 0x4a, 0x85, 0xae, 0x75, 0x8a, 0x83, 0xd0, 0x98, 0x7a, 0x8b, 0x4e, 0x5c, 0x6a,
	0xd9, 0xa2, 0x97, 0x5b, 0x36, 0x0c, 0x1b, 0x74, 0x03, 0xbc, 0x55, 0x1a, 0x68, 0x6e, 0xf7, 0x18,
	0x3b, 0x2c, 0x17, 0xa6, 0x0b, 0x9f, 0xcc, 0x69, 0x63, 0x27, 0xf7, 0x16, 0x5c, 0x64, 0xfc, 0x88,
	0x3b, 0x50, 0xd6, 0x4f, 0x26, 0x27, 0xe8, 0xee, 0x8e, 0x74, 0xc3, 0xc5, 0x0e, 0x4b, 0x71, 0x82,
	0xe2, 0x7f, 0x49, 0xf7, 0x60, 0x7d, 0xca, 0x36, 0xf8, 0xa1, 0x7b, 0x85, 0x56, 0xb2, 0x62, 0xa9,
	0x55, 0x7d, 0x22, 0xfb, 0x95, 0xf2, 0xb0, 0xae, 0xfa, 0xdf, 0x61, 0xe9, 0x6f, 0x21, 0x10, 0xdb,
	0x76, 0x4f, 0x73, 0xf1, 0xeb, 0x60, 0x18, 0x9e, 0x81, 0x61, 0x64, 0x0c, 0xc3, 0x80, 0x53, 0xd1,
	0x65, 0x39, 0x85, 0x76, 0x21, 0xe5, 0xb1, 0x90, 0xd8, 0xbf, 0x39, 0xfc, 0x82, 0x72, 0xb9, 0xd9,
	0xdc, 0xd7, 0xb1, 0xd1, 0x7b, 0xa4, 0x91, 0x53, 0x05, 0xb8, 0x3a, 0x1d, 0x4b, 0x03, 0x10, 0x2f,
	0x0e, 0x95, 0xd8, 0x96, 0x49, 0x30, 0xda, 0x86, 0xe8, 0x89, 0xd5, 0xe1, 0xbf, 0x23, 0x2d, 0x0c,
	0x81, 0xa9, 0xa2, 0xdb, 0xb0, 0x6e, 0xe2, 0xe7, 0xae, 0x3a, 0xc6, 0x0a, 0xbe, 0xd9, 0x35, 0x3a,
	0xdd, 0x08, 0x98, 0x21, 0x3d, 0x03, 0xb1, 0xa4, 0x99, 0x5d, 0x6c, 0xfc, 0xcf, 0xae, 0xc0, 0x33,
	0x10, 0xcb, 0xd8, 0xc0, 0xaf, 0x07, 0xd0, 0x32, 0x2b, 0x14, 0xfe, 0x98, 0x60, 0x49, 0xc8, 0xcf,
	0xb6, 0x06, 0x76, 0xd0, 0xaf, 0x43, 0x20, 0x8c, 0x52, 0x0a, 0xba, 0x3b, 0xa7, 0x06, 0x4e, 0xe5,
	0x9d, 0xec, 0xfc, 0xc3, 0x95, 0x8a, 0x3f, 0xfd, 0xfb, 0xbf, 0x7e, 0x15, 0xde, 0x95, 0xee, 0xe5,
	0xcf, 0xb7, 0xf3, 0x7e, 0xc0, 0x24, 0xff, 0xf2, 0x62, 0x33, 0xaf, 0xf2, 0x3c, 0x56, 0x92, 0x7f,
	0xc9, 0x07, 0xaf, 0xd8, 0xbf, 0xd8, 0x76, 0x08, 0x5b, 0x68, 0x27, 0x74, 0x17, 0xfd, 0x32, 0x04,
	0x71, 0x9e, 0x72, 0xd0, 0x9c, 0xc4, 0x3d, 0x91, 0x94, 0x16, 0x45, 0xf5, 0x29, 0x8b, 0x6a, 0x07,
	0xdd, 0xbf, 0x66, 0x54, 0xf9, 0x97, 0xfc, 0x38, 0x5f, 0xa1, 0xdf, 0x84, 0x20, 0x19, 0xd0, 0x0e,
	0xdd, 0x59, 0xfa, 0xbe, 0x67, 0xef, 0x2e, 0xa3, 0xca, 0x59, 0x2c, 0x7d, 0xcc, 0xa2, 0xdc, 0x46,
	0xf9, 0x6b, 0x46, 0x89, 0x7e, 0x1b, 0x02, 0x61, 0x74, 0xc7, 0xe7, 0xa1, 0x39, 0x9d, 0x08, 0x16,
	0x9d, 0x9b, 0xcc, 0x22, 0x7a, 0x50, 0x78, 0xed, 0x73, 0xdb, 0x61, 0xf7, 0xfd, 0x77, 0x21, 0x10,
	0x46, 0x97, 0x68, 0x5e, 0x7c, 0xd3, 0x37, 0x6d, 0x51, 0x7c, 0x07, 0x2c, 0xbe, 0xb2, 0xf4, 0xe0,
	0xb5, 0xe3, 0xeb, 0xb2, 0x15, 0x29, 0xed, 0x7e, 0x11, 0x02, 0x61, 0x74, 0x0f, 0xe7, 0x05, 0x39,
	0x7d, 0x59, 0xb3, 0x6f, 0x5f, 0xca, 0x5c, 0xf2, 0xc0, 0x76, 0x87, 0x01, 0xeb, 0xee, 0xbe, 0xf6,
	0xe9, 0xed, 0x0d, 0xe0, 0xdd, 0xae, 0x35, 0x98, 0x19, 0xca, 0x9e, 0x40, 0xf9, 0xd3, 0xa0, 0xab,
	0x36, 0x42, 0x3f, 0xfe, 0xd4, 0x57, 0xeb, 0x5b, 0x86, 0x66, 0xf6, 0x73, 0x96, 0xd3, 0xcf, 0xf7,
	0xb1, 0xc9, 0x62, 0xca, 0x73, 0x91, 0x66, 0xeb, 0xe4, 0xf2, 0xbf, 0xb8, 0x77, 0x83, 0x71, 0x27,
	0xce, 0x94, 0x3f, 0xfa, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x02, 0xa8, 0x72, 0x7c, 0x0e, 0x1f,
	0x00, 0x00,
}