 AWSTemplateFormatVersion: "2010-09-09"
 
-Description: Amazon Transcribe Post Call Analytics - PCA (v0.7.8 ) (uksb-1sn29lk73)
+Description: Amazon Transcribe Post Call Analytics - PCA (v0.7.9 ) (uksb-1sn29lk73)
 
 Parameters:
 
@@ -363,8 +363,10 @@ Parameters:
 
   GenAIQueryBedrockModelId:
     Type: String
-    Default: anthropic.claude-v2
+    Default: anthropic.claude-3-haiku-20240307-v1:0
     AllowedValues:
+      - anthropic.claude-3-haiku-20240307-v1:0
+      - anthropic.claude-3-sonnet-20240229-v1:0
       - amazon.titan-text-express-v1
       - anthropic.claude-v1
       - anthropic.claude-instant-v1
@@ -394,8 +396,10 @@ Parameters:
 
   SummarizationBedrockModelId:
     Type: String
-    Default: anthropic.claude-instant-v1
+    Default: anthropic.claude-3-haiku-20240307-v1:0
     AllowedValues:
+      - anthropic.claude-3-haiku-20240307-v1:0
+      - anthropic.claude-3-sonnet-20240229-v1:0
       - amazon.titan-text-express-v1
       - anthropic.claude-v1
       - anthropic.claude-instant-v1
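
Both GenAIQueryBedrockModelId and SummarizationBedrockModelId now default to Claude 3 Haiku, with the two Claude 3 model IDs added to the allowed values. A quick way to check that a chosen model ID is actually offered in the target account and region before creating the stack (a hedged sketch; list_foundation_models reports what the region offers, not whether model access has been granted in the Bedrock console):

import boto3

bedrock = boto3.client("bedrock")  # control-plane client, distinct from bedrock-runtime
candidate = "anthropic.claude-3-haiku-20240307-v1:0"

offered = {m["modelId"] for m in bedrock.list_foundation_models()["modelSummaries"]}
print(f"{candidate} offered in this region: {candidate in offered}")
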
@@ -685,10 +689,18 @@ Resources:
     provider = modelId.split(".")[0]
     request_body = None
     if provider == "anthropic":
-        request_body = {
-            "prompt": prompt,
-            "max_tokens_to_sample": DEFAULT_MAX_TOKENS
-        }
+        if 'claude-3' in modelId:
+            request_body = {
+                "max_tokens": DEFAULT_MAX_TOKENS,
+                "messages": [{"role": "user", "content": prompt}],
+                "anthropic_version": "bedrock-2023-05-31"
+            }
+        else:
+            request_body = {
+                "prompt": prompt,
+                "max_tokens_to_sample": DEFAULT_MAX_TOKENS
+            }
+
         request_body.update(parameters)
     elif provider == "ai21":
         request_body = {
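
Reviewer note on the branch above: Claude 3 models on Bedrock only accept the Messages API payload (anthropic_version, messages, max_tokens), while the earlier Claude models still use the prompt / max_tokens_to_sample completion payload, which is why the code now branches on 'claude-3' in the model ID. A minimal standalone sketch of the same request-building logic, assuming Bedrock model access is already enabled; the model ID, prompt, and MAX_TOKENS values are illustrative stand-ins rather than values taken from the stack:

import json
import boto3

bedrock = boto3.client("bedrock-runtime")           # Bedrock runtime client
MAX_TOKENS = 256                                     # stand-in for DEFAULT_MAX_TOKENS
model_id = "anthropic.claude-3-haiku-20240307-v1:0"  # illustrative model choice
prompt = "Summarize the following call transcript: ..."

if "claude-3" in model_id:
    # Claude 3 family: Messages API schema
    body = {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": MAX_TOKENS,
        "messages": [{"role": "user", "content": prompt}],
    }
else:
    # Older Claude models: completion schema (the prompt must be wrapped
    # in "\n\nHuman: ...\n\nAssistant:" for these models)
    body = {
        "prompt": prompt,
        "max_tokens_to_sample": MAX_TOKENS,
    }

response = bedrock.invoke_model(
    modelId=model_id,
    body=json.dumps(body),
    contentType="application/json",
    accept="application/json",
)
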
@@ -713,8 +725,13 @@ Resources:
     provider = modelId.split(".")[0]
     generated_text = None
     if provider == "anthropic":
-        response_body = json.loads(response.get("body").read().decode())
-        generated_text = response_body.get("completion")
+        if 'claude-3' in modelId:
+            response_raw = json.loads(response.get("body").read().decode())
+            generated_text = response_raw.get('content')[0].get('text')
+
+        else:
+            response_body = json.loads(response.get("body").read().decode())
+            generated_text = response_body.get("completion")
     elif provider == "ai21":
         response_body = json.loads(response.get("body").read())
         generated_text = response_body.get("completions")[0].get("data").get("text")
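
The response shape changes in the same way: Claude 3 returns a JSON body with a "content" list of blocks, each carrying a "text" field, whereas the earlier Claude models return a flat "completion" string. A small sketch of reading either shape from an invoke_model response, assuming response comes from a bedrock-runtime invoke_model call like the one sketched above:

import json

def extract_text(model_id: str, response: dict) -> str:
    # invoke_model returns the model output as a StreamingBody under "body"
    payload = json.loads(response["body"].read().decode())
    if "claude-3" in model_id:
        # Messages API: concatenate the text content blocks
        return "".join(block.get("text", "") for block in payload.get("content", []))
    # Completion API: single completion string
    return payload.get("completion", "")
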