Class: Google::Apis::AiplatformV1::LearningGenaiRootHarm

Inherits:
Object
  • Object
show all
Includes:
Core::Hashable, Core::JsonObjectSupport
Defined in:
lib/google/apis/aiplatform_v1/classes.rb,
lib/google/apis/aiplatform_v1/representations.rb,
lib/google/apis/aiplatform_v1/representations.rb

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(**args) ⇒ LearningGenaiRootHarm

Returns a new instance of LearningGenaiRootHarm.



31200
31201
31202
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31200

# Builds a new LearningGenaiRootHarm; all keyword arguments are
# forwarded to #update!, which assigns the matching attributes.
def initialize(**args) = update!(**args)

Instance Attribute Details

#contextual_dangerousBoolean Also known as: contextual_dangerous?

Please do not use; this is still under development. Corresponds to the JSON property contextualDangerous

Returns:

  • (Boolean)


31104
31105
31106
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31104

# Reader for @contextual_dangerous (JSON property `contextualDangerous`).
def contextual_dangerous = @contextual_dangerous

#csamBoolean Also known as: csam?

Corresponds to the JSON property csam

Returns:

  • (Boolean)


31110
31111
31112
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31110

# Reader for @csam (JSON property `csam`).
def csam = @csam

#fringeBoolean Also known as: fringe?

Corresponds to the JSON property fringe

Returns:

  • (Boolean)


31116
31117
31118
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31116

# Reader for @fringe (JSON property `fringe`).
def fringe = @fringe

#grail_image_harm_typeGoogle::Apis::AiplatformV1::LearningGenaiRootHarmGrailImageHarmType

Harm type for images. Corresponds to the JSON property grailImageHarmType



31122
31123
31124
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31122

# Reader for @grail_image_harm_type (JSON property `grailImageHarmType`).
def grail_image_harm_type = @grail_image_harm_type

#grail_text_harm_typeGoogle::Apis::AiplatformV1::LearningGenaiRootHarmGrailTextHarmType

Harm type for text. Corresponds to the JSON property grailTextHarmType



31127
31128
31129
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31127

# Reader for @grail_text_harm_type (JSON property `grailTextHarmType`).
def grail_text_harm_type = @grail_text_harm_type

#image_csamBoolean Also known as: image_csam?

Corresponds to the JSON property imageCsam

Returns:

  • (Boolean)


31132
31133
31134
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31132

# Reader for @image_csam (JSON property `imageCsam`).
def image_csam = @image_csam

#image_pedoBoolean Also known as: image_pedo?

Corresponds to the JSON property imagePedo

Returns:

  • (Boolean)


31138
31139
31140
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31138

# Reader for @image_pedo (JSON property `imagePedo`).
def image_pedo = @image_pedo

#image_pornBoolean Also known as: image_porn?

Image signals. Corresponds to the JSON property imagePorn

Returns:

  • (Boolean)


31144
31145
31146
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31144

# Reader for @image_porn (JSON property `imagePorn`).
def image_porn = @image_porn

#image_violenceBoolean Also known as: image_violence?

Corresponds to the JSON property imageViolence

Returns:

  • (Boolean)


31150
31151
31152
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31150

# Reader for @image_violence (JSON property `imageViolence`).
def image_violence = @image_violence

#pqcBoolean Also known as: pqc?

Corresponds to the JSON property pqc

Returns:

  • (Boolean)


31156
31157
31158
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31156

# Reader for @pqc (JSON property `pqc`).
def pqc = @pqc

#safetycatGoogle::Apis::AiplatformV1::LearningGenaiRootHarmSafetyCatCategories

Corresponds to the JSON property safetycat



31162
31163
31164
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31162

# Reader for @safetycat (JSON property `safetycat`).
def safetycat = @safetycat

#spiiGoogle::Apis::AiplatformV1::LearningGenaiRootHarmSpiiFilter

Spii Filter uses buckets http://google3/google/privacy/dlp/v2/storage.proto;l=77;rcl=584719820 to classify the input. LMRoot converts the bucket into a double score. For example, the score for "POSSIBLE" is 3 / 5 = 0.6. Corresponds to the JSON property spii



31169
31170
31171
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31169

# Reader for @spii (JSON property `spii`).
def spii = @spii

#thresholdFloat

Corresponds to the JSON property threshold

Returns:

  • (Float)


31174
31175
31176
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31174

# Reader for @threshold (JSON property `threshold`).
def threshold = @threshold

#video_frame_csamBoolean Also known as: video_frame_csam?

Corresponds to the JSON property videoFrameCsam

Returns:

  • (Boolean)


31179
31180
31181
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31179

# Reader for @video_frame_csam (JSON property `videoFrameCsam`).
def video_frame_csam = @video_frame_csam

#video_frame_pedoBoolean Also known as: video_frame_pedo?

Corresponds to the JSON property videoFramePedo

Returns:

  • (Boolean)


31185
31186
31187
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31185

# Reader for @video_frame_pedo (JSON property `videoFramePedo`).
def video_frame_pedo = @video_frame_pedo

#video_frame_pornBoolean Also known as: video_frame_porn?

Video frame signals. Corresponds to the JSON property videoFramePorn

Returns:

  • (Boolean)


31191
31192
31193
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31191

# Reader for @video_frame_porn (JSON property `videoFramePorn`).
def video_frame_porn = @video_frame_porn

#video_frame_violenceBoolean Also known as: video_frame_violence?

Corresponds to the JSON property videoFrameViolence

Returns:

  • (Boolean)


31197
31198
31199
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31197

# Reader for @video_frame_violence (JSON property `videoFrameViolence`).
def video_frame_violence = @video_frame_violence

Instance Method Details

#update!(**args) ⇒ Object

Update properties of this object



31205
31206
31207
31208
31209
31210
31211
31212
31213
31214
31215
31216
31217
31218
31219
31220
31221
31222
31223
# File 'lib/google/apis/aiplatform_v1/classes.rb', line 31205

# Update properties of this object. Only keys that are actually
# present in +args+ are assigned; absent keys leave the current
# attribute value untouched (an explicit nil value is still assigned).
def update!(**args)
  %i[contextual_dangerous csam fringe grail_image_harm_type
     grail_text_harm_type image_csam image_pedo image_porn
     image_violence pqc safetycat spii threshold video_frame_csam
     video_frame_pedo video_frame_porn video_frame_violence].each do |prop|
    instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
  end
end