@@ -97,21 +97,65 @@ public struct SafetySetting {
9797}
9898
/// Categories describing the potential harm a piece of content may pose.
public struct HarmCategory: Sendable, Equatable, Hashable {
  /// Known harm categories recognized by this version of the SDK.
  ///
  /// Raw values must match the server's wire format exactly (no surrounding whitespace).
  enum Kind: String {
    case harassment = "HARM_CATEGORY_HARASSMENT"
    case hateSpeech = "HARM_CATEGORY_HATE_SPEECH"
    case sexuallyExplicit = "HARM_CATEGORY_SEXUALLY_EXPLICIT"
    case dangerousContent = "HARM_CATEGORY_DANGEROUS_CONTENT"
    case civicIntegrity = "HARM_CATEGORY_CIVIC_INTEGRITY"
  }

  /// Harassment content.
  public static var harassment: HarmCategory {
    return self.init(kind: .harassment)
  }

  /// Negative or harmful comments targeting identity and/or protected attributes.
  public static var hateSpeech: HarmCategory {
    return self.init(kind: .hateSpeech)
  }

  /// Contains references to sexual acts or other lewd content.
  public static var sexuallyExplicit: HarmCategory {
    return self.init(kind: .sexuallyExplicit)
  }

  /// Promotes or enables access to harmful goods, services, or activities.
  public static var dangerousContent: HarmCategory {
    return self.init(kind: .dangerousContent)
  }

  /// Content that may be used to harm civic integrity.
  public static var civicIntegrity: HarmCategory {
    return self.init(kind: .civicIntegrity)
  }

  /// Returns the raw string representation of the `HarmCategory` value.
  ///
  /// > Note: This value directly corresponds to the values in the
  /// > [REST API](https://cloud.google.com/vertex-ai/docs/reference/rest/v1beta1/HarmCategory).
  public let rawValue: String

  /// Creates a `HarmCategory` from a known `Kind`.
  init(kind: Kind) {
    rawValue = kind.rawValue
  }

  /// Creates a `HarmCategory` from an arbitrary raw server value.
  ///
  /// Unrecognized values are preserved as-is so new server values don't break decoding;
  /// an error is logged to aid diagnosis.
  init(rawValue: String) {
    if Kind(rawValue: rawValue) == nil {
      VertexLog.error(
        code: .generateContentResponseUnrecognizedHarmCategory,
        """
        Unrecognized HarmCategory with value "\(rawValue)":
        - Check for updates to the SDK as support for "\(rawValue)" may have been added; see \
        release notes at https://firebase.google.com/support/release-notes/ios
        - Search for "\(rawValue)" in the Firebase Apple SDK Issue Tracker at \
        https://github.com/firebase/firebase-ios-sdk/issues and file a Bug Report if none found
        """
      )
    }
    self.rawValue = rawValue
  }
}
116160
117161// MARK: - Codable Conformances
@@ -139,17 +183,8 @@ extension SafetyRating: Decodable {}
@available(iOS 15.0, macOS 11.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
extension HarmCategory: Codable {
  /// Decodes a `HarmCategory` from its single-value raw string representation.
  ///
  /// Decoding never fails on an unrecognized category: the raw value is preserved and a
  /// diagnostic is logged by `HarmCategory.init(rawValue:)`.
  public init(from decoder: Decoder) throws {
    let rawValue = try decoder.singleValueContainer().decode(String.self)
    self = HarmCategory(rawValue: rawValue)
  }
}
155190
0 commit comments