diff --git a/clients/client-rekognitionstreaming/src/commands/StartFaceLivenessSessionCommand.ts b/clients/client-rekognitionstreaming/src/commands/StartFaceLivenessSessionCommand.ts index 22550564d848..4164a07e1166 100644 --- a/clients/client-rekognitionstreaming/src/commands/StartFaceLivenessSessionCommand.ts +++ b/clients/client-rekognitionstreaming/src/commands/StartFaceLivenessSessionCommand.ts @@ -48,8 +48,6 @@ export interface StartFaceLivenessSessionCommandOutput extends StartFaceLiveness *

The maximum video size for Face Liveness is 10 MB. Face Liveness throws a * ValidationException if the video does not match the necessary formatting and * size parameters.

- *

StartFaceLivenessSession supports the websockets and the AWS SDK - * for JavaScript.

* @example * Use a bare-bones client and the command you need to make an API call. * ```javascript @@ -106,6 +104,30 @@ export interface StartFaceLivenessSessionCommandOutput extends StartFaceLiveness * CurrentColorStartTimestamp: Number("long"), // required * }, * }, + * FaceMovementChallenge: { // FaceMovementClientChallenge + * ChallengeId: "STRING_VALUE", // required + * VideoStartTimestamp: Number("long"), + * VideoEndTimestamp: Number("long"), + * InitialFace: { + * BoundingBox: { + * Width: Number("float"), // required + * Height: Number("float"), // required + * Left: Number("float"), // required + * Top: Number("float"), // required + * }, + * InitialFaceDetectedTimestamp: Number("long"), // required + * }, + * TargetFace: { + * BoundingBox: { + * Width: Number("float"), // required + * Height: Number("float"), // required + * Left: Number("float"), // required + * Top: Number("float"), // required + * }, + * FaceDetectedInTargetPositionStartTimestamp: Number("long"), // required + * FaceDetectedInTargetPositionEndTimestamp: Number("long"), // required + * }, + * }, * }, * }, * }, @@ -151,12 +173,37 @@ export interface StartFaceLivenessSessionCommandOutput extends StartFaceLiveness * // }, * // ], * // }, + * // FaceMovementChallenge: { // FaceMovementServerChallenge + * // OvalParameters: { + * // Width: Number("float"), // required + * // Height: Number("float"), // required + * // CenterX: Number("float"), // required + * // CenterY: Number("float"), // required + * // }, + * // ChallengeConfig: { + * // BlazeFaceDetectionThreshold: Number("float"), + * // FaceDistanceThresholdMin: Number("float"), + * // FaceDistanceThreshold: Number("float"), + * // FaceDistanceThresholdMax: Number("float"), + * // OvalIouThreshold: Number("float"), + * // OvalHeightWidthRatio: Number("float"), + * // OvalIouWidthThreshold: Number("float"), + * // OvalIouHeightThreshold: Number("float"), + * // FaceIouWidthThreshold: Number("float"), + * // FaceIouHeightThreshold: 
Number("float"), + * // OvalFitTimeout: Number("int"), + * // }, + * // }, * // }, * // }, * // }, * // DisconnectionEvent: { // DisconnectionEvent * // TimestampMillis: Number("long"), // required * // }, + * // ChallengeEvent: { // ChallengeEvent + * // Version: "STRING_VALUE", // required + * // Type: "FaceMovementAndLightChallenge" || "FaceMovementChallenge", // required + * // }, * // ValidationException: { // ValidationException * // Message: "STRING_VALUE", * // Code: "STRING_VALUE", diff --git a/clients/client-rekognitionstreaming/src/endpoint/ruleset.ts b/clients/client-rekognitionstreaming/src/endpoint/ruleset.ts index 75643cbc8fb5..c8157e8d91d7 100644 --- a/clients/client-rekognitionstreaming/src/endpoint/ruleset.ts +++ b/clients/client-rekognitionstreaming/src/endpoint/ruleset.ts @@ -6,25 +6,27 @@ import { RuleSetObject } from "@smithy/types"; or see "smithy.rules#endpointRuleSet" in codegen/sdk-codegen/aws-models/rekognitionstreaming.json */ -const q="required", -r="fn", -s="argv", -t="ref"; -const a="isSet", -b="tree", -c="error", -d="endpoint", -e="PartitionResult", -f={[q]:false,"type":"String"}, -g={[q]:true,"default":false,"type":"Boolean"}, -h={[t]:"Endpoint"}, -i={[r]:"booleanEquals",[s]:[{[t]:"UseFIPS"},true]}, -j={[r]:"booleanEquals",[s]:[{[t]:"UseDualStack"},true]}, -k={}, -l={[r]:"booleanEquals",[s]:[true,{[r]:"getAttr",[s]:[{[t]:e},"supportsFIPS"]}]}, -m={[r]:"booleanEquals",[s]:[true,{[r]:"getAttr",[s]:[{[t]:e},"supportsDualStack"]}]}, -n=[i], -o=[j], -p=[{[t]:"Region"}]; -const _data={version:"1.0",parameters:{Region:f,UseDualStack:g,UseFIPS:g,Endpoint:f},rules:[{conditions:[{[r]:a,[s]:[h]}],type:b,rules:[{conditions:n,error:"Invalid Configuration: FIPS and custom endpoint are not supported",type:c},{type:b,rules:[{conditions:o,error:"Invalid Configuration: Dualstack and custom endpoint are not 
supported",type:c},{endpoint:{url:h,properties:k,headers:k},type:d}]}]},{type:b,rules:[{conditions:[{[r]:a,[s]:p}],type:b,rules:[{conditions:[{[r]:"aws.partition",[s]:p,assign:e}],type:b,rules:[{conditions:[i,j],type:b,rules:[{conditions:[l,m],type:b,rules:[{type:b,rules:[{endpoint:{url:"https://streaming-rekognition-fips.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:k,headers:k},type:d}]}]},{error:"FIPS and DualStack are enabled, but this partition does not support one or both",type:c}]},{conditions:n,type:b,rules:[{conditions:[l],type:b,rules:[{type:b,rules:[{endpoint:{url:"https://streaming-rekognition-fips.{Region}.{PartitionResult#dnsSuffix}",properties:k,headers:k},type:d}]}]},{error:"FIPS is enabled but this partition does not support FIPS",type:c}]},{conditions:o,type:b,rules:[{conditions:[m],type:b,rules:[{type:b,rules:[{endpoint:{url:"https://streaming-rekognition.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:k,headers:k},type:d}]}]},{error:"DualStack is enabled but this partition does not support DualStack",type:c}]},{type:b,rules:[{endpoint:{url:"https://streaming-rekognition.{Region}.{PartitionResult#dnsSuffix}",properties:k,headers:k},type:d}]}]}]},{error:"Invalid Configuration: Missing Region",type:c}]}]}; +const s="required", +t="fn", +u="argv", +v="ref"; +const a=true, +b="isSet", +c="booleanEquals", +d="error", +e="endpoint", +f="tree", +g="PartitionResult", +h={[s]:false,"type":"String"}, +i={[s]:true,"default":false,"type":"Boolean"}, +j={[v]:"Endpoint"}, +k={[t]:c,[u]:[{[v]:"UseFIPS"},true]}, +l={[t]:c,[u]:[{[v]:"UseDualStack"},true]}, +m={}, +n={[t]:"getAttr",[u]:[{[v]:g},"supportsFIPS"]}, +o={[t]:c,[u]:[true,{[t]:"getAttr",[u]:[{[v]:g},"supportsDualStack"]}]}, +p=[k], +q=[l], +r=[{[v]:"Region"}]; +const _data={version:"1.0",parameters:{Region:h,UseDualStack:i,UseFIPS:i,Endpoint:h},rules:[{conditions:[{[t]:b,[u]:[j]}],rules:[{conditions:p,error:"Invalid Configuration: FIPS and custom endpoint are not 
supported",type:d},{rules:[{conditions:q,error:"Invalid Configuration: Dualstack and custom endpoint are not supported",type:d},{endpoint:{url:j,properties:m,headers:m},type:e}],type:f}],type:f},{rules:[{conditions:[{[t]:b,[u]:r}],rules:[{conditions:[{[t]:"aws.partition",[u]:r,assign:g}],rules:[{conditions:[k,l],rules:[{conditions:[{[t]:c,[u]:[a,n]},o],rules:[{rules:[{endpoint:{url:"https://streaming-rekognition-fips.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:m,headers:m},type:e}],type:f}],type:f},{error:"FIPS and DualStack are enabled, but this partition does not support one or both",type:d}],type:f},{conditions:p,rules:[{conditions:[{[t]:c,[u]:[n,a]}],rules:[{rules:[{endpoint:{url:"https://streaming-rekognition-fips.{Region}.{PartitionResult#dnsSuffix}",properties:m,headers:m},type:e}],type:f}],type:f},{error:"FIPS is enabled but this partition does not support FIPS",type:d}],type:f},{conditions:q,rules:[{conditions:[o],rules:[{rules:[{endpoint:{url:"https://streaming-rekognition.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:m,headers:m},type:e}],type:f}],type:f},{error:"DualStack is enabled but this partition does not support DualStack",type:d}],type:f},{rules:[{endpoint:{url:"https://streaming-rekognition.{Region}.{PartitionResult#dnsSuffix}",properties:m,headers:m},type:e}],type:f}],type:f}],type:f},{error:"Invalid Configuration: Missing Region",type:d}],type:f}]}; export const ruleSet: RuleSetObject = _data; diff --git a/clients/client-rekognitionstreaming/src/models/models_0.ts b/clients/client-rekognitionstreaming/src/models/models_0.ts index 17a2ac8c40b9..4a1096cf5192 100644 --- a/clients/client-rekognitionstreaming/src/models/models_0.ts +++ b/clients/client-rekognitionstreaming/src/models/models_0.ts @@ -12,6 +12,7 @@ export class AccessDeniedException extends __BaseException { readonly $fault: "client" = "client"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -129,13 +130,31 @@ export 
interface ChallengeConfig { */ FaceIouHeightThreshold?: number | undefined; - /** - *

Timeout limit in which the end-users need to fit in the oval, in miliseconds.

- * @public - */ OvalFitTimeout?: number | undefined; } +/** + * @public + * @enum + */ +export const ChallengeType = { + FACE_MOVEMENT_AND_LIGHT_CHALLENGE: "FaceMovementAndLightChallenge", + FACE_MOVEMENT_CHALLENGE: "FaceMovementChallenge", +} as const; + +/** + * @public + */ +export type ChallengeType = (typeof ChallengeType)[keyof typeof ChallengeType]; + +/** + * @public + */ +export interface ChallengeEvent { + Version: string | undefined; + Type: ChallengeType | undefined; +} + /** *

Represents the colors to be flashed to the end user, each color represented in RGB values.

* @public @@ -275,12 +294,40 @@ export interface FaceMovementAndLightClientChallenge { ColorDisplayed?: ColorDisplayed | undefined; } +/** + * @public + */ +export interface FaceMovementClientChallenge { + ChallengeId: string | undefined; + VideoStartTimestamp?: number | undefined; + VideoEndTimestamp?: number | undefined; + /** + *

Contains bounding box of initial face position of the user on the device screen. + * Contains an epoch timestamp of when the user was detected in this position. Used for Face + * Liveness detection.

+ * @public + */ + InitialFace?: InitialFace | undefined; + + /** + *

Contains bounding box of face position of the user on the device screen at target + * location constructed for the challenge. This is generated using the random offsets provided by + * the server to the client at session start. Also contains start and end epoch timestamp of when + * the user was detected in this position.

+ * @public + */ + TargetFace?: TargetFace | undefined; +} + /** *

Object containing information for Face Liveness challenges performed at the client * side.

* @public */ -export type ClientChallenge = ClientChallenge.FaceMovementAndLightChallengeMember | ClientChallenge.$UnknownMember; +export type ClientChallenge = + | ClientChallenge.FaceMovementAndLightChallengeMember + | ClientChallenge.FaceMovementChallengeMember + | ClientChallenge.$UnknownMember; /** * @public @@ -292,6 +339,13 @@ export namespace ClientChallenge { */ export interface FaceMovementAndLightChallengeMember { FaceMovementAndLightChallenge: FaceMovementAndLightClientChallenge; + FaceMovementChallenge?: never; + $unknown?: never; + } + + export interface FaceMovementChallengeMember { + FaceMovementAndLightChallenge?: never; + FaceMovementChallenge: FaceMovementClientChallenge; $unknown?: never; } @@ -300,17 +354,20 @@ export namespace ClientChallenge { */ export interface $UnknownMember { FaceMovementAndLightChallenge?: never; + FaceMovementChallenge?: never; $unknown: [string, any]; } export interface Visitor { FaceMovementAndLightChallenge: (value: FaceMovementAndLightClientChallenge) => T; + FaceMovementChallenge: (value: FaceMovementClientChallenge) => T; _: (name: string, value: any) => T; } export const visit = (value: ClientChallenge, visitor: Visitor): T => { if (value.FaceMovementAndLightChallenge !== undefined) return visitor.FaceMovementAndLightChallenge(value.FaceMovementAndLightChallenge); + if (value.FaceMovementChallenge !== undefined) return visitor.FaceMovementChallenge(value.FaceMovementChallenge); return visitor._(value.$unknown[0], value.$unknown[1]); }; } @@ -439,6 +496,23 @@ export interface FaceMovementAndLightServerChallenge { ColorSequences: ColorSequence[] | undefined; } +/** + * @public + */ +export interface FaceMovementServerChallenge { + /** + *

Oval parameters needed for oval display to complete the oval match challenge.

+ * @public + */ + OvalParameters: OvalParameters | undefined; + + /** + *

Configuration options for Face Liveness challenges performed at the client side.

+ * @public + */ + ChallengeConfig: ChallengeConfig | undefined; +} + /** *

Unexpected error during processing of request.

* @public @@ -448,6 +522,7 @@ export class InternalServerException extends __BaseException { readonly $fault: "server" = "server"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -541,7 +616,10 @@ export namespace LivenessRequestStream { *

Information on the challenge sent by the server.

* @public */ -export type ServerChallenge = ServerChallenge.FaceMovementAndLightChallengeMember | ServerChallenge.$UnknownMember; +export type ServerChallenge = + | ServerChallenge.FaceMovementAndLightChallengeMember + | ServerChallenge.FaceMovementChallengeMember + | ServerChallenge.$UnknownMember; /** * @public @@ -553,6 +631,13 @@ export namespace ServerChallenge { */ export interface FaceMovementAndLightChallengeMember { FaceMovementAndLightChallenge: FaceMovementAndLightServerChallenge; + FaceMovementChallenge?: never; + $unknown?: never; + } + + export interface FaceMovementChallengeMember { + FaceMovementAndLightChallenge?: never; + FaceMovementChallenge: FaceMovementServerChallenge; $unknown?: never; } @@ -561,17 +646,20 @@ export namespace ServerChallenge { */ export interface $UnknownMember { FaceMovementAndLightChallenge?: never; + FaceMovementChallenge?: never; $unknown: [string, any]; } export interface Visitor { FaceMovementAndLightChallenge: (value: FaceMovementAndLightServerChallenge) => T; + FaceMovementChallenge: (value: FaceMovementServerChallenge) => T; _: (name: string, value: any) => T; } export const visit = (value: ServerChallenge, visitor: Visitor): T => { if (value.FaceMovementAndLightChallenge !== undefined) return visitor.FaceMovementAndLightChallenge(value.FaceMovementAndLightChallenge); + if (value.FaceMovementChallenge !== undefined) return visitor.FaceMovementChallenge(value.FaceMovementChallenge); return visitor._(value.$unknown[0], value.$unknown[1]); }; } @@ -609,6 +697,7 @@ export class ServiceQuotaExceededException extends __BaseException { readonly $fault: "client" = "client"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -633,6 +722,7 @@ export class ServiceUnavailableException extends __BaseException { readonly $fault: "server" = "server"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -658,6 +748,7 @@ export class ThrottlingException extends __BaseException 
{ readonly $fault: "client" = "client"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -686,6 +777,7 @@ export class ValidationException extends __BaseException { readonly $fault: "client" = "client"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -706,6 +798,7 @@ export class ValidationException extends __BaseException { * @public */ export type LivenessResponseStream = + | LivenessResponseStream.ChallengeEventMember | LivenessResponseStream.DisconnectionEventMember | LivenessResponseStream.InternalServerExceptionMember | LivenessResponseStream.ServerSessionInformationEventMember @@ -726,6 +819,7 @@ export namespace LivenessResponseStream { export interface ServerSessionInformationEventMember { ServerSessionInformationEvent: ServerSessionInformationEvent; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException?: never; InternalServerException?: never; ThrottlingException?: never; @@ -741,6 +835,19 @@ export namespace LivenessResponseStream { export interface DisconnectionEventMember { ServerSessionInformationEvent?: never; DisconnectionEvent: DisconnectionEvent; + ChallengeEvent?: never; + ValidationException?: never; + InternalServerException?: never; + ThrottlingException?: never; + ServiceQuotaExceededException?: never; + ServiceUnavailableException?: never; + $unknown?: never; + } + + export interface ChallengeEventMember { + ServerSessionInformationEvent?: never; + DisconnectionEvent?: never; + ChallengeEvent: ChallengeEvent; ValidationException?: never; InternalServerException?: never; ThrottlingException?: never; @@ -756,6 +863,7 @@ export namespace LivenessResponseStream { export interface ValidationExceptionMember { ServerSessionInformationEvent?: never; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException: ValidationException; InternalServerException?: never; ThrottlingException?: never; @@ -771,6 +879,7 @@ export namespace LivenessResponseStream { 
export interface InternalServerExceptionMember { ServerSessionInformationEvent?: never; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException?: never; InternalServerException: InternalServerException; ThrottlingException?: never; @@ -786,6 +895,7 @@ export namespace LivenessResponseStream { export interface ThrottlingExceptionMember { ServerSessionInformationEvent?: never; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException?: never; InternalServerException?: never; ThrottlingException: ThrottlingException; @@ -801,6 +911,7 @@ export namespace LivenessResponseStream { export interface ServiceQuotaExceededExceptionMember { ServerSessionInformationEvent?: never; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException?: never; InternalServerException?: never; ThrottlingException?: never; @@ -816,6 +927,7 @@ export namespace LivenessResponseStream { export interface ServiceUnavailableExceptionMember { ServerSessionInformationEvent?: never; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException?: never; InternalServerException?: never; ThrottlingException?: never; @@ -830,6 +942,7 @@ export namespace LivenessResponseStream { export interface $UnknownMember { ServerSessionInformationEvent?: never; DisconnectionEvent?: never; + ChallengeEvent?: never; ValidationException?: never; InternalServerException?: never; ThrottlingException?: never; @@ -841,6 +954,7 @@ export namespace LivenessResponseStream { export interface Visitor { ServerSessionInformationEvent: (value: ServerSessionInformationEvent) => T; DisconnectionEvent: (value: DisconnectionEvent) => T; + ChallengeEvent: (value: ChallengeEvent) => T; ValidationException: (value: ValidationException) => T; InternalServerException: (value: InternalServerException) => T; ThrottlingException: (value: ThrottlingException) => T; @@ -853,6 +967,7 @@ export namespace LivenessResponseStream { if (value.ServerSessionInformationEvent !== undefined) 
return visitor.ServerSessionInformationEvent(value.ServerSessionInformationEvent); if (value.DisconnectionEvent !== undefined) return visitor.DisconnectionEvent(value.DisconnectionEvent); + if (value.ChallengeEvent !== undefined) return visitor.ChallengeEvent(value.ChallengeEvent); if (value.ValidationException !== undefined) return visitor.ValidationException(value.ValidationException); if (value.InternalServerException !== undefined) return visitor.InternalServerException(value.InternalServerException); @@ -874,6 +989,7 @@ export class SessionNotFoundException extends __BaseException { readonly $fault: "client" = "client"; Message?: string | undefined; Code?: string | undefined; + /** * @internal */ @@ -961,6 +1077,7 @@ export const LivenessResponseStreamFilterSensitiveLog = (obj: LivenessResponseSt if (obj.ServerSessionInformationEvent !== undefined) return { ServerSessionInformationEvent: obj.ServerSessionInformationEvent }; if (obj.DisconnectionEvent !== undefined) return { DisconnectionEvent: obj.DisconnectionEvent }; + if (obj.ChallengeEvent !== undefined) return { ChallengeEvent: obj.ChallengeEvent }; if (obj.ValidationException !== undefined) return { ValidationException: obj.ValidationException }; if (obj.InternalServerException !== undefined) return { InternalServerException: obj.InternalServerException }; if (obj.ThrottlingException !== undefined) return { ThrottlingException: obj.ThrottlingException }; diff --git a/clients/client-rekognitionstreaming/src/protocols/Aws_restJson1.ts b/clients/client-rekognitionstreaming/src/protocols/Aws_restJson1.ts index f3f469e587cd..6084a0698b77 100644 --- a/clients/client-rekognitionstreaming/src/protocols/Aws_restJson1.ts +++ b/clients/client-rekognitionstreaming/src/protocols/Aws_restJson1.ts @@ -37,6 +37,7 @@ import { AccessDeniedException, BoundingBox, ChallengeConfig, + ChallengeEvent, ClientChallenge, ClientSessionInformationEvent, ColorDisplayed, @@ -44,6 +45,8 @@ import { DisconnectionEvent, 
FaceMovementAndLightClientChallenge, FaceMovementAndLightServerChallenge, + FaceMovementClientChallenge, + FaceMovementServerChallenge, FreshnessColor, InitialFace, InternalServerException, @@ -347,6 +350,11 @@ const de_LivenessResponseStream = ( DisconnectionEvent: await de_DisconnectionEvent_event(event["DisconnectionEvent"], context), }; } + if (event["ChallengeEvent"] != null) { + return { + ChallengeEvent: await de_ChallengeEvent_event(event["ChallengeEvent"], context), + }; + } if (event["ValidationException"] != null) { return { ValidationException: await de_ValidationException_event(event["ValidationException"], context), @@ -381,6 +389,12 @@ const de_LivenessResponseStream = ( return { $unknown: output }; }); }; +const de_ChallengeEvent_event = async (output: any, context: __SerdeContext): Promise => { + const contents: ChallengeEvent = {} as any; + const data: any = await parseBody(output.body, context); + Object.assign(contents, _json(data)); + return contents; +}; const de_DisconnectionEvent_event = async (output: any, context: __SerdeContext): Promise => { const contents: DisconnectionEvent = {} as any; const data: any = await parseBody(output.body, context); @@ -460,6 +474,7 @@ const se_ClientChallenge = (input: ClientChallenge, context: __SerdeContext): an FaceMovementAndLightChallenge: (value) => ({ FaceMovementAndLightChallenge: se_FaceMovementAndLightClientChallenge(value, context), }), + FaceMovementChallenge: (value) => ({ FaceMovementChallenge: se_FaceMovementClientChallenge(value, context) }), _: (name, value) => ({ [name]: value } as any), }); }; @@ -494,6 +509,19 @@ const se_FaceMovementAndLightClientChallenge = ( }); }; +/** + * serializeAws_restJson1FaceMovementClientChallenge + */ +const se_FaceMovementClientChallenge = (input: FaceMovementClientChallenge, context: __SerdeContext): any => { + return take(input, { + ChallengeId: [], + InitialFace: (_) => se_InitialFace(_, context), + TargetFace: (_) => se_TargetFace(_, context), + 
VideoEndTimestamp: [], + VideoStartTimestamp: [], + }); +}; + // se_FreshnessColor omitted. /** @@ -546,6 +574,8 @@ const de_ChallengeConfig = (output: any, context: __SerdeContext): ChallengeConf }) as any; }; +// de_ChallengeEvent omitted. + // de_ColorComponentList omitted. /** @@ -588,6 +618,16 @@ const de_FaceMovementAndLightServerChallenge = ( }) as any; }; +/** + * deserializeAws_restJson1FaceMovementServerChallenge + */ +const de_FaceMovementServerChallenge = (output: any, context: __SerdeContext): FaceMovementServerChallenge => { + return take(output, { + ChallengeConfig: (_: any) => de_ChallengeConfig(_, context), + OvalParameters: (_: any) => de_OvalParameters(_, context), + }) as any; +}; + // de_FreshnessColor omitted. /** @@ -614,6 +654,11 @@ const de_ServerChallenge = (output: any, context: __SerdeContext): ServerChallen ), }; } + if (output.FaceMovementChallenge != null) { + return { + FaceMovementChallenge: de_FaceMovementServerChallenge(output.FaceMovementChallenge, context), + }; + } return { $unknown: Object.entries(output)[0] }; }; diff --git a/codegen/sdk-codegen/aws-models/rekognitionstreaming.json b/codegen/sdk-codegen/aws-models/rekognitionstreaming.json index 6b060194af73..5bb05f944b03 100644 --- a/codegen/sdk-codegen/aws-models/rekognitionstreaming.json +++ b/codegen/sdk-codegen/aws-models/rekognitionstreaming.json @@ -154,16 +154,47 @@ } }, "OvalFitTimeout": { - "target": "com.amazonaws.rekognitionstreaming#TimeoutInMs", - "traits": { - "smithy.api#documentation": "

Timeout limit in which the end-users need to fit in the oval, in miliseconds.

" - } + "target": "com.amazonaws.rekognitionstreaming#TimeoutInMs" } }, "traits": { "smithy.api#documentation": "

Configuration options for Face Liveness challenges performed at the client side.

" } }, + "com.amazonaws.rekognitionstreaming#ChallengeEvent": { + "type": "structure", + "members": { + "Version": { + "target": "com.amazonaws.rekognitionstreaming#Version", + "traits": { + "smithy.api#required": {} + } + }, + "Type": { + "target": "com.amazonaws.rekognitionstreaming#ChallengeType", + "traits": { + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.rekognitionstreaming#ChallengeType": { + "type": "enum", + "members": { + "FACE_MOVEMENT_AND_LIGHT_CHALLENGE": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "FaceMovementAndLightChallenge" + } + }, + "FACE_MOVEMENT_CHALLENGE": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "FaceMovementChallenge" + } + } + } + }, "com.amazonaws.rekognitionstreaming#ChallengeVersions": { "type": "string", "traits": { @@ -182,6 +213,9 @@ "traits": { "smithy.api#documentation": "

Information about a face movement and light challenge type.

" } + }, + "FaceMovementChallenge": { + "target": "com.amazonaws.rekognitionstreaming#FaceMovementClientChallenge" } }, "traits": { @@ -417,6 +451,46 @@ "smithy.api#documentation": "

Contains information regarding the OvalParameters and\n LightChallengeType for a challenge.

" } }, + "com.amazonaws.rekognitionstreaming#FaceMovementClientChallenge": { + "type": "structure", + "members": { + "ChallengeId": { + "target": "com.amazonaws.rekognitionstreaming#UUID", + "traits": { + "smithy.api#required": {} + } + }, + "VideoStartTimestamp": { + "target": "com.amazonaws.rekognitionstreaming#EpochMillis" + }, + "VideoEndTimestamp": { + "target": "com.amazonaws.rekognitionstreaming#EpochMillis" + }, + "InitialFace": { + "target": "com.amazonaws.rekognitionstreaming#InitialFace" + }, + "TargetFace": { + "target": "com.amazonaws.rekognitionstreaming#TargetFace" + } + } + }, + "com.amazonaws.rekognitionstreaming#FaceMovementServerChallenge": { + "type": "structure", + "members": { + "OvalParameters": { + "target": "com.amazonaws.rekognitionstreaming#OvalParameters", + "traits": { + "smithy.api#required": {} + } + }, + "ChallengeConfig": { + "target": "com.amazonaws.rekognitionstreaming#ChallengeConfig", + "traits": { + "smithy.api#required": {} + } + } + } + }, "com.amazonaws.rekognitionstreaming#Float": { "type": "float" }, @@ -520,6 +594,9 @@ "smithy.api#documentation": "

Notification that disconnection event has occurred to a Face Liveness session.

" } }, + "ChallengeEvent": { + "target": "com.amazonaws.rekognitionstreaming#ChallengeEvent" + }, "ValidationException": { "target": "com.amazonaws.rekognitionstreaming#ValidationException", "traits": { @@ -665,7 +742,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -684,7 +760,6 @@ }, { "conditions": [], - "type": "tree", "rules": [ { "conditions": [ @@ -712,13 +787,14 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" }, { "conditions": [], - "type": "tree", "rules": [ { "conditions": [ @@ -731,7 +807,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -745,7 +820,6 @@ "assign": "PartitionResult" } ], - "type": "tree", "rules": [ { "conditions": [ @@ -768,7 +842,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -803,11 +876,9 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [], - "type": "tree", "rules": [ { "conditions": [], @@ -818,16 +889,19 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "FIPS and DualStack are enabled, but this partition does not support one or both", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -841,14 +915,12 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ { "fn": "booleanEquals", "argv": [ - true, { "fn": "getAttr", "argv": [ @@ -857,15 +929,14 @@ }, "supportsFIPS" ] - } + }, + true ] } ], - "type": "tree", "rules": [ { "conditions": [], - "type": "tree", "rules": [ { "conditions": [], @@ -876,16 +947,19 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "FIPS is enabled but this partition does not support FIPS", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [ @@ -899,7 +973,6 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [ @@ -919,11 +992,9 @@ ] } ], - "type": "tree", "rules": [ { "conditions": [], - "type": "tree", "rules": [ { "conditions": [], @@ -934,20 +1005,22 @@ }, "type": 
"endpoint" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "DualStack is enabled but this partition does not support DualStack", "type": "error" } - ] + ], + "type": "tree" }, { "conditions": [], - "type": "tree", "rules": [ { "conditions": [], @@ -958,18 +1031,22 @@ }, "type": "endpoint" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" } - ] + ], + "type": "tree" }, { "conditions": [], "error": "Invalid Configuration: Missing Region", "type": "error" } - ] + ], + "type": "tree" } ] }, @@ -1297,6 +1374,9 @@ "traits": { "smithy.api#documentation": "

Paramteters and configuration information for the face movement and light sequence challenges.

" } + }, + "FaceMovementChallenge": { + "target": "com.amazonaws.rekognitionstreaming#FaceMovementServerChallenge" } }, "traits": { @@ -1413,7 +1493,7 @@ } ], "traits": { - "smithy.api#documentation": "

Starts a Face Liveness video stream and liveness detection process for a given\n session.

\n

Requires sessionId, ChallengeVersions, VideoWidth,\n VideoHeight and a RequestEventStream as input. The event stream\n contains information about different events for the session, including the challenge\n information used for verification.

\n

The maximum video size for Face Liveness is 10 MB. Face Liveness throws a\n ValidationException if the video does not match the necessary formatting and\n size parameters.

\n

StartFaceLivenessSession supports the websockets and the AWS SDK \n for JavaScript.

", + "smithy.api#documentation": "

Starts a Face Liveness video stream and liveness detection process for a given\n session.

\n

Requires sessionId, ChallengeVersions, VideoWidth,\n VideoHeight and a RequestEventStream as input. The event stream\n contains information about different events for the session, including the challenge\n information used for verification.

\n

The maximum video size for Face Liveness is 10 MB. Face Liveness throws a\n ValidationException if the video does not match the necessary formatting and\n size parameters.

", "smithy.api#http": { "method": "POST", "uri": "/start-face-liveness-session", @@ -1583,6 +1663,16 @@ "smithy.api#httpError": 400 } }, + "com.amazonaws.rekognitionstreaming#Version": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 5, + "max": 15 + }, + "smithy.api#pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$" + } + }, "com.amazonaws.rekognitionstreaming#VideoChunk": { "type": "blob" },