diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index cce6a46..1f014e8 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -18,6 +18,7 @@ env:
   REGISTRY: ghcr.io
   IMAGE_NAME: ${{ github.repository }}
   DOCS_IMAGE_NAME: aihub-prepdocs
+  PLUGIN_IMAGE_NAME: aihub-plugin
   AIHUB_ARTIFACT_NAME: aihub-tf-module
 
 jobs:
@@ -105,6 +106,21 @@ jobs:
           tags: ${{ env.REGISTRY }}/${{ env.GITHUB_REPOSITORY_LOWER_CASE }}/${{ env.DOCS_IMAGE_NAME }}:${{ env.MINVERVERSIONOVERRIDE }}
           labels: ${{ steps.meta-docs.outputs.labels }}
 
+      - name: Extract metadata (tags, labels) for plugin Docker
+        id: meta-plugin
+        uses: docker/metadata-action@v3
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.PLUGIN_IMAGE_NAME }}
+
+      - name: Build and push Docker image for plugin
+        uses: docker/build-push-action@v3
+        with:
+          context: ./src/OpenAI.Plugin/
+          file: ./src/OpenAI.Plugin/Dockerfile
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ env.REGISTRY }}/${{ env.GITHUB_REPOSITORY_LOWER_CASE }}/${{ env.PLUGIN_IMAGE_NAME }}:${{ env.MINVERVERSIONOVERRIDE }}
+          labels: ${{ steps.meta-plugin.outputs.labels }}
+
       - name: Upload Artifacts
         uses: actions/upload-artifact@v3
         with:
diff --git a/.gitignore b/.gitignore
index 6b6f56d..2fc9fe8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -414,4 +414,6 @@ src/AIHub/appsettings.Development.json
 
 # .tfstate files
 *.tfstate
-*.tfstate.*
\ No newline at end of file
+*.tfstate.*
+
+local.settings.json
\ No newline at end of file
diff --git a/infra/.terraform.lock.hcl b/infra/.terraform.lock.hcl
index a12f564..cc2a273 100644
--- a/infra/.terraform.lock.hcl
+++ b/infra/.terraform.lock.hcl
@@ -59,25 +59,6 @@ provider "registry.terraform.io/hashicorp/azurerm" {
   ]
 }
 
-provider "registry.terraform.io/hashicorp/null" {
-  version = "3.2.2"
-  hashes = [
-    "h1:m467k2tZ9cdFFgHW7LPBK2GLPH43LC6wc3ppxr8yvoE=",
-    "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7",
-    "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a",
-    "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3",
-    "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606",
-    "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546",
-    "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539",
-    "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452",
-    "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
-    "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422",
-    "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae",
-    "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1",
-    "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e",
-  ]
-}
-
 provider "registry.terraform.io/hashicorp/random" {
   version = "3.6.0"
   hashes = [
diff --git a/infra/main.tf b/infra/main.tf
index 8d46a67..d4fd06e 100644
--- a/infra/main.tf
+++ b/infra/main.tf
@@ -23,6 +23,7 @@ locals {
   ca_chat_name      = "${var.ca_chat_name}${local.name_sufix}"
   ca_prep_docs_name = "${var.ca_prep_docs_name}${local.name_sufix}"
   ca_aihub_name     = "${var.ca_aihub_name}${local.name_sufix}"
+  func_name         = "plugin${local.sufix}"
 }
 
 resource "azurerm_resource_group" "rg" {
@@ -229,3 +230,18 @@ module "ca_aihub" {
   enable_entra_id_authentication = var.enable_entra_id_authentication
   image_name                     = var.ca_aihub_image
 }
+
+module "plugin" {
+  source                   = "./modules/ca-plugin"
+  location                 = azurerm_resource_group.rg.location
+  resource_group_name      = azurerm_resource_group.rg.name
+  resource_group_id        = azurerm_resource_group.rg.id
+  func_name                = local.func_name
+  image_name               = var.ca_plugin_image
+  cae_id                   = module.cae.cae_id
+  cae_default_domain       = module.cae.default_domain
+  appi_instrumentation_key = module.appi.appi_key
+  openai_key               = module.openai.openai_key
+  openai_model             = module.openai.gpt_deployment_name
+  openai_endpoint          = module.openai.openai_endpoint
+}
diff --git a/infra/modules/ca-plugin/main.tf b/infra/modules/ca-plugin/main.tf
new file mode 100644
index 0000000..f8cf223
--- /dev/null
+++ b/infra/modules/ca-plugin/main.tf
@@ -0,0 +1,74 @@
+resource "azurerm_storage_account" "sa" {
+  name                      = "stfunc${var.func_name}"
+  location                  = var.location
+  resource_group_name       = var.resource_group_name
+  account_tier              = "Standard"
+  account_replication_type  = "LRS"
+  enable_https_traffic_only = true
+}
+
+resource "azapi_resource" "ca_function" {
+  schema_validation_enabled = false
+  name                      = "func-${var.func_name}"
+  location                  = var.location
+  parent_id                 = var.resource_group_id
+  type                      = "Microsoft.Web/sites@2023-01-01"
+  body = jsonencode({
+    kind = "functionapp,linux,container,azurecontainerapps"
+    properties : {
+      language             = "dotnet-isolated"
+      managedEnvironmentId = "${var.cae_id}"
+      siteConfig = {
+        linuxFxVersion = "DOCKER|cmendibl3/aoai-plugin:0.8.0"
+        appSettings = [
+          {
+            name  = "AzureWebJobsStorage"
+            value = azurerm_storage_account.sa.primary_connection_string
+          },
+          {
+            name  = "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING"
+            value = azurerm_storage_account.sa.primary_connection_string
+          },
+          {
+            name  = "APPINSIGHTS_INSTRUMENTATIONKEY"
+            value = var.appi_instrumentation_key
+          },
+          {
+            name  = "APPLICATIONINSIGHTS_CONNECTION_STRING"
+            value = "InstrumentationKey=${var.appi_instrumentation_key}"
+          },
+          {
+            name  = "FUNCTIONS_WORKER_RUNTIME"
+            value = "dotnet-isolated"
+          },
+          {
+            name  = "FUNCTIONS_EXTENSION_VERSION"
+            value = "~4"
+          },
+          {
+            name  = "MODEL_ID"
+            value = var.openai_model
+          },
+          {
+            name  = "API_KEY"
+            value = var.openai_key
+          },
+          {
+            name  = "ENDPOINT"
+            value = var.openai_endpoint
+          },
+          {
+            name  = "OpenApi__HostNames"
+            value = "https://func-${var.func_name}.${var.cae_default_domain}/api"
+          }
+        ]
+      }
+      workloadProfileName = "Consumption"
+      resourceConfig = {
+        cpu    = 1
+        memory = "2Gi"
+      }
+      httpsOnly = false
+    }
+  })
+}
diff --git a/infra/modules/ca-plugin/outputs.tf b/infra/modules/ca-plugin/outputs.tf
new file mode 100644
index 0000000..e69de29
diff --git a/infra/modules/ca-plugin/providers.tf b/infra/modules/ca-plugin/providers.tf
new file mode 100644
index 0000000..c4fa93c
--- /dev/null
+++ b/infra/modules/ca-plugin/providers.tf
@@ -0,0 +1,20 @@
+terraform {
+  required_version = ">= 1.1.8"
+  required_providers {
+    azurerm = {
+      source  = "hashicorp/azurerm"
+      version = "3.87.0"
+    }
+    azapi = {
+      source = "Azure/azapi"
+    }
+  }
+}
+
+provider "azurerm" {
+  features {
+    cognitive_account {
+      purge_soft_delete_on_destroy = true
+    }
+  }
+}
\ No newline at end of file
diff --git a/infra/modules/ca-plugin/variables.tf b/infra/modules/ca-plugin/variables.tf
new file mode 100644
index 0000000..3b4974d
--- /dev/null
+++ b/infra/modules/ca-plugin/variables.tf
@@ -0,0 +1,11 @@
+variable "resource_group_name" {}
+variable "resource_group_id" {}
+variable "location" {}
+variable "func_name" {}
+variable "image_name" {}
+variable "cae_id" {}
+variable "cae_default_domain" {}
+variable "appi_instrumentation_key" {}
+variable "openai_key" {}
+variable "openai_model" {}
+variable "openai_endpoint" {}
diff --git a/infra/modules/cae/outputs.tf b/infra/modules/cae/outputs.tf
index 500378d..94868ce 100644
--- a/infra/modules/cae/outputs.tf
+++ b/infra/modules/cae/outputs.tf
@@ -2,7 +2,7 @@ output "cae_id" {
   value = azapi_resource.cae.id
 }
 
-output "defaultDomain" {
+output "default_domain" {
   value = jsondecode(azapi_resource.cae.output).properties.defaultDomain
 }
 
diff --git a/infra/modules/openai/outputs.tf b/infra/modules/openai/outputs.tf
index 9d98fda..bcc3207 100644
--- a/infra/modules/openai/outputs.tf
+++ b/infra/modules/openai/outputs.tf
@@ -33,3 +33,7 @@ output "gpt4_deployment_model_name" {
 output "embedding_deployment_name" {
   value = azurerm_cognitive_deployment.embedding.name
 }
+
+output "openai_key" {
+  value = azurerm_cognitive_account.openai.primary_access_key
+}
diff --git a/infra/variables.tf b/infra/variables.tf
index 421ab01..3100f10 100644
--- a/infra/variables.tf
+++ b/infra/variables.tf
@@ -91,11 +91,15 @@ variable "ca_chat_image" {
 }
 
 variable "ca_prep_docs_image" {
-  default = "ghcr.io/azure/aihub/aihub-prepdocs:1.0.6"
+  default = "ghcr.io/azure/aihub/aihub-prepdocs:1.0.8"
+}
+
+variable "ca_plugin_image" {
+  default = "ghcr.io/azure/aihub/aihub-plugin:1.0.8"
 }
 
 variable "ca_aihub_image" {
-  default = "ghcr.io/azure/aihub/aihub:1.0.6"
+  default = "ghcr.io/azure/aihub/aihub:1.0.8"
 }
 
 variable "use_random_suffix" {
diff --git a/src/OpenAI.Plugin/.dockerignore b/src/OpenAI.Plugin/.dockerignore
new file mode 100644
index 0000000..1927772
--- /dev/null
+++ b/src/OpenAI.Plugin/.dockerignore
@@ -0,0 +1 @@
+local.settings.json
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/AIPluginJson.cs b/src/OpenAI.Plugin/AIPluginJson.cs
new file mode 100644
index 0000000..f22322a
--- /dev/null
+++ b/src/OpenAI.Plugin/AIPluginJson.cs
@@ -0,0 +1,23 @@
+public class AIPluginJson
+{
+    [Function("GetAIPluginJson")]
+    public HttpResponseData Run([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = ".well-known/ai-plugin.json")] HttpRequestData req)
+    {
+        var currentDomain = $"{req.Url.Scheme}://{req.Url.Host}:{req.Url.Port}/api";
+
+        HttpResponseData response = req.CreateResponse(HttpStatusCode.OK);
+        response.Headers.Add("Content-Type", "application/json");
+
+        var settings = AIPluginSettings.FromFile();
+
+        // serialize app settings to json using System.Text.Json
+        var json = System.Text.Json.JsonSerializer.Serialize(settings);
+
+        // replace {url} with the current domain
+        json = json.Replace("{url}", currentDomain, StringComparison.OrdinalIgnoreCase);
+
+        response.WriteString(json);
+
+        return response;
+    }
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/CallTranscriptPlugin.cs b/src/OpenAI.Plugin/CallTranscriptPlugin.cs
new file mode 100644
index 0000000..30c1ce9
--- /dev/null
+++ b/src/OpenAI.Plugin/CallTranscriptPlugin.cs
@@ -0,0 +1,73 @@
+using System.Net;
+using System.Text.Json;
+using Microsoft.Azure.Functions.Worker;
+using Microsoft.Azure.Functions.Worker.Http;
+using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Attributes;
+using Microsoft.Extensions.Logging;
+using Models;
+
+namespace OpenAI.Plugin
+{
+    public class CallTranscriptPlugin
+    {
+        private static readonly JsonSerializerOptions _jsonOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };
+        private readonly ILogger _logger;
+        private readonly Kernel _kernel;
+        private readonly string _endpoint = Environment.GetEnvironmentVariable("ENDPOINT")!;
+        private readonly string _deploymentName = Environment.GetEnvironmentVariable("MODEL_ID")!;
+        private readonly string _subscriptionKey = Environment.GetEnvironmentVariable("API_KEY")!;
+
+        public CallTranscriptPlugin(ILoggerFactory loggerFactory, Kernel kernel)
+        {
+            _logger = loggerFactory.CreateLogger<CallTranscriptPlugin>();
+            _kernel = kernel;
+        }
+
+        [Function("Call Transcript Plugin")]
+        [OpenApiOperation(operationId: "CallTranscriptPlugin", tags: new[] { "CallTranscriptPlugin" }, Description = "Used to analyze a call given the transcript and a prompt")]
+        [OpenApiRequestBody("application/json", typeof(ExecuteFunctionRequest), Description = "Variables to use when executing the specified function.", Required = true)]
+        [OpenApiResponseWithBody(statusCode: HttpStatusCode.OK, contentType: "application/json", bodyType: typeof(ExecuteFunctionResponse), Description = "Returns the response from the AI.")]
+        [OpenApiResponseWithBody(statusCode: HttpStatusCode.BadRequest, contentType: "application/json", bodyType: typeof(ErrorResponse), Description = "Returned if the request body is invalid.")]
+        [OpenApiResponseWithBody(statusCode: HttpStatusCode.NotFound, contentType: "application/json", bodyType: typeof(ErrorResponse), Description = "Returned if the semantic function could not be found.")]
+        public async Task<HttpResponseData> Run(
+            [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "plugins/transcript")] HttpRequestData req,
+            FunctionContext executionContext)
+        {
+            _logger.LogInformation("C# HTTP trigger function processed a request.");
+
+#pragma warning disable CA1062
+            var functionRequest = await JsonSerializer.DeserializeAsync<ExecuteFunctionRequest>(req.Body, _jsonOptions).ConfigureAwait(false);
+#pragma warning disable CA1062
+            if (functionRequest == null)
+            {
+                return await CreateResponseAsync(req, HttpStatusCode.BadRequest, new ErrorResponse() { Message = $"Invalid request body {functionRequest}" }).ConfigureAwait(false);
+            }
+
+            try
+            {
+                var context = new KernelArguments
+                {
+                    { "transcript", functionRequest.Transcript }
+                };
+
+                var result = await _kernel.InvokeAsync("Prompts", "CallAnalyzer", context).ConfigureAwait(false);
+
+                return await CreateResponseAsync(
+                    req,
+                    HttpStatusCode.OK,
+                    new ExecuteFunctionResponse() { Response = result.ToString() }).ConfigureAwait(false);
+            }
+            catch (Exception ex)
+            {
+                return await CreateResponseAsync(req, HttpStatusCode.BadRequest, new ErrorResponse() { Message = ex.Message }).ConfigureAwait(false);
+            }
+        }
+
+        private static async Task<HttpResponseData> CreateResponseAsync(HttpRequestData requestData, HttpStatusCode statusCode, object responseBody)
+        {
+            var responseData = requestData.CreateResponse(statusCode);
+            await responseData.WriteAsJsonAsync(responseBody).ConfigureAwait(false);
+            return responseData;
+        }
+    }
+}
diff --git a/src/OpenAI.Plugin/Dockerfile b/src/OpenAI.Plugin/Dockerfile
new file mode 100644
index 0000000..1dcb2fa
--- /dev/null
+++ b/src/OpenAI.Plugin/Dockerfile
@@ -0,0 +1,12 @@
+FROM mcr.microsoft.com/dotnet/sdk:8.0 AS installer-env
+
+COPY . /src/dotnet-function-app
+RUN cd /src/dotnet-function-app && \
+mkdir -p /home/site/wwwroot && \
+dotnet publish *.csproj --output /home/site/wwwroot
+
+FROM mcr.microsoft.com/azure-functions/dotnet-isolated:4.0-dotnet-isolated8.0
+ENV AzureWebJobsScriptRoot=/home/site/wwwroot \
+    AzureFunctionsJobHost__Logging__Console__IsEnabled=true
+
+COPY --from=installer-env ["/home/site/wwwroot", "/home/site/wwwroot"]
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/GlobalUsing.cs b/src/OpenAI.Plugin/GlobalUsing.cs
new file mode 100644
index 0000000..463b975
--- /dev/null
+++ b/src/OpenAI.Plugin/GlobalUsing.cs
@@ -0,0 +1,19 @@
+global using Microsoft.SemanticKernel;
+global using Microsoft.Extensions.DependencyInjection;
+global using Microsoft.Extensions.Logging;
+global using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Abstractions;
+global using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Configurations;
+global using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Enums;
+global using Microsoft.Extensions.Hosting;
+global using Microsoft.OpenApi.Models;
+global using System.Text.Json.Serialization;
+global using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Attributes;
+global using Microsoft.Extensions.Configuration;
+global using System.Net;
+global using System.Reflection;
+global using Microsoft.Azure.Functions.Worker;
+global using Microsoft.Azure.Functions.Worker.Http;
+global using Azure.AI.OpenAI;
+global using Azure;
+global using Azure.Identity;
+global using Models;
diff --git a/src/OpenAI.Plugin/Models/AIPluginSettings.cs b/src/OpenAI.Plugin/Models/AIPluginSettings.cs
new file mode 100644
index 0000000..91e05a5
--- /dev/null
+++ b/src/OpenAI.Plugin/Models/AIPluginSettings.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+namespace Models;
+
+#pragma warning disable CA1056
+#pragma warning disable CA1034
+
+public class AIPluginSettings
+{
+    public const string DefaultConfigFile = "aiplugin.json";
+
+    [JsonPropertyName("schema_version")]
+    public string SchemaVersion { get; set; } = "v1";
+
+    [JsonPropertyName("name_for_model")]
+    public string NameForModel { get; set; } = string.Empty;
+
+    [JsonPropertyName("name_for_human")]
+    public string NameForHuman { get; set; } = string.Empty;
+
+    [JsonPropertyName("description_for_model")]
+    public string DescriptionForModel { get; set; } = string.Empty;
+
+    [JsonPropertyName("description_for_human")]
+    public string DescriptionForHuman { get; set; } = string.Empty;
+
+    [JsonPropertyName("auth")]
+    public AuthModel Auth { get; set; } = new AuthModel();
+
+    [JsonPropertyName("api")]
+    public ApiModel Api { get; set; } = new ApiModel();
+
+    [JsonPropertyName("logo_url")]
+    public string LogoUrl { get; set; } = string.Empty;
+
+    [JsonPropertyName("contact_email")]
+    public string ContactEmail { get; set; } = string.Empty;
+
+    [JsonPropertyName("legal_info_url")]
+    public string LegalInfoUrl { get; set; } = string.Empty;
+
+
+    public class AuthModel
+    {
+        [JsonPropertyName("type")]
+        public string Type { get; set; } = string.Empty;
+
+        [JsonPropertyName("authorization_url")]
+        public string AuthorizationType { get; set; } = string.Empty;
+    }
+
+    public class ApiModel
+    {
+        [JsonPropertyName("type")]
+        public string Type { get; set; } = "openapi";
+
+        [JsonPropertyName("url")]
+        public string Url { get; set; } = string.Empty;
+
+        [JsonPropertyName("has_user_authentication")]
+        public bool HasUserAuthentication { get; set; } = false;
+    }
+
+    public static AIPluginSettings FromFile(string configFile = DefaultConfigFile)
+    {
+        var configuration = new ConfigurationBuilder()
+            .SetBasePath(new FileInfo(Assembly.GetExecutingAssembly().Location).Directory.FullName)
+            .AddJsonFile(configFile, optional: false, reloadOnChange: true)
+            .Build();
+
+        return configuration.Get<AIPluginSettings>()
+            ?? throw new InvalidDataException($"Invalid app settings in '{configFile}', please provide configuration settings using instructions in the README.");
+    }
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/Models/ErrorResponse.cs b/src/OpenAI.Plugin/Models/ErrorResponse.cs
new file mode 100644
index 0000000..201ffa6
--- /dev/null
+++ b/src/OpenAI.Plugin/Models/ErrorResponse.cs
@@ -0,0 +1,8 @@
+namespace Models;
+
+internal class ErrorResponse
+{
+    [JsonPropertyName("message")]
+    [OpenApiProperty(Description = "The error message.")]
+    public string Message { get; set; } = string.Empty;
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/Models/ExecuteFunctionRequest.cs b/src/OpenAI.Plugin/Models/ExecuteFunctionRequest.cs
new file mode 100644
index 0000000..473d870
--- /dev/null
+++ b/src/OpenAI.Plugin/Models/ExecuteFunctionRequest.cs
@@ -0,0 +1,9 @@
+namespace Models;
+
+#pragma warning disable CA1812
+internal class ExecuteFunctionRequest
+{
+    [JsonPropertyName("transcript")]
+    [OpenApiProperty(Description = "The call transcript.", Default = "")]
+    public string Transcript { get; set; } = string.Empty;
+}
diff --git a/src/OpenAI.Plugin/Models/ExecuteFunctionResponse.cs b/src/OpenAI.Plugin/Models/ExecuteFunctionResponse.cs
new file mode 100644
index 0000000..dd857b1
--- /dev/null
+++ b/src/OpenAI.Plugin/Models/ExecuteFunctionResponse.cs
@@ -0,0 +1,8 @@
+namespace Models;
+
+internal class ExecuteFunctionResponse
+{
+    [JsonPropertyName("response")]
+    [OpenApiProperty(Description = "The response from the AI.")]
+    public string? Response { get; set; }
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/OpenAI.Plugin.csproj b/src/OpenAI.Plugin/OpenAI.Plugin.csproj
new file mode 100644
index 0000000..29148cb
--- /dev/null
+++ b/src/OpenAI.Plugin/OpenAI.Plugin.csproj
@@ -0,0 +1,43 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <AzureFunctionsVersion>v4</AzureFunctionsVersion>
+    <OutputType>Exe</OutputType>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  </ItemGroup>
+  <ItemGroup>
+    <None Update="host.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+    <None Update="local.settings.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      <CopyToPublishDirectory>Never</CopyToPublishDirectory>
+    </None>
+    <None Update="aiplugin.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      <CopyToPublishDirectory>PreserveNewest</CopyToPublishDirectory>
+    </None>
+    <None Update="Prompts\**\*">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/Program.cs b/src/OpenAI.Plugin/Program.cs
new file mode 100644
index 0000000..9d8208e
--- /dev/null
+++ b/src/OpenAI.Plugin/Program.cs
@@ -0,0 +1,40 @@
+OpenApiConfigurationOptions configOptions = new()
+{
+    Info = new OpenApiInfo()
+    {
+        Version = "1.0.0",
+        Title = "Call Transcript Plugin",
+        Description = "This is a plugin that analyze a call given the transcript and a prompt.",
+    },
+    Servers = DefaultOpenApiConfigurationOptions.GetHostNames(),
+    OpenApiVersion = OpenApiVersionType.V3,
+    ForceHttps = false,
+    ForceHttp = false,
+};
+
+var host = new HostBuilder()
+    .ConfigureFunctionsWorkerDefaults()
+    .ConfigureServices(services =>
+    {
+        services.AddSingleton(_ => configOptions);
+        services.AddTransient((provider) => CreateKernel(provider));
+    })
+    .Build();
+
+host.Run();
+
+Kernel CreateKernel(IServiceProvider provider)
+{
+    const string DefaultSemanticPromptsFolder = "Prompts";
+    string semanticPromptsFolder = Environment.GetEnvironmentVariable("SEMANTIC_PLUGINS_FOLDER") ?? DefaultSemanticPromptsFolder;
+    var modelId = Environment.GetEnvironmentVariable("MODEL_ID")!;
+    var endpoint = Environment.GetEnvironmentVariable("ENDPOINT")!;
+    var apiKey = Environment.GetEnvironmentVariable("API_KEY")!;
+
+    var builder = Kernel.CreateBuilder();
+    builder.Services.AddLogging(c => c.SetMinimumLevel(LogLevel.Trace).AddDebug());
+    builder.Services.AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey);
+    builder.Plugins.AddFromPromptDirectory(semanticPromptsFolder, "Prompts");
+    var kernel = builder.Build();
+    return kernel;
+}
diff --git a/src/OpenAI.Plugin/Prompts/CallAnalyzer/config.json b/src/OpenAI.Plugin/Prompts/CallAnalyzer/config.json
new file mode 100644
index 0000000..6176ff0
--- /dev/null
+++ b/src/OpenAI.Plugin/Prompts/CallAnalyzer/config.json
@@ -0,0 +1,21 @@
+{
+  "schema": 1,
+  "description": "Used to analyze a call given the transcript.",
+  "type": "completion",
+  "completion": {
+    "max_tokens": 1000,
+    "temperature": 0.1,
+    "top_p": 0,
+    "presence_penalty": 0,
+    "frequency_penalty": 0
+  },
+  "input": {
+    "parameters": [
+      {
+        "name": "transcript",
+        "description": "The call transcript",
+        "defaultValue": "agent: hello, carlos: hello i have an issue agent: have you tried turmning it off and on again"
+      }
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/Prompts/CallAnalyzer/skprompt.txt b/src/OpenAI.Plugin/Prompts/CallAnalyzer/skprompt.txt
new file mode 100644
index 0000000..1d19858
--- /dev/null
+++ b/src/OpenAI.Plugin/Prompts/CallAnalyzer/skprompt.txt
@@ -0,0 +1,11 @@
+EXTRACT THE FOLLOWING INFORMATION FROM THE PHONE CONVERSATION USE A BULLETED POINTS FORMAT:
+1. CALL REASON
+2. NAME OF AGENT
+3. NAME OF CALLER
+4. SENTIMENT OF THE CALLER
+5. DETAILED SUMMARY
+USE THE SAME LANGUAGE OF THE TRANSCRIPT TO COMPLETE THE FIELDS.
+
++++++
+{{$transcript}}
++++++
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/Properties/launchSettings.json b/src/OpenAI.Plugin/Properties/launchSettings.json
new file mode 100644
index 0000000..feaad50
--- /dev/null
+++ b/src/OpenAI.Plugin/Properties/launchSettings.json
@@ -0,0 +1,9 @@
+{
+  "profiles": {
+    "OpenAI_Plugin": {
+      "commandName": "Project",
+      "commandLineArgs": "--port 7059",
+      "launchBrowser": false
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/aiplugin.json b/src/OpenAI.Plugin/aiplugin.json
new file mode 100644
index 0000000..8d10c63
--- /dev/null
+++ b/src/OpenAI.Plugin/aiplugin.json
@@ -0,0 +1,17 @@
+{
+  "schemaVersion": "v1",
+  "nameForHuman": "Call Transcript Plugin",
+  "nameForModel": "CallTranscriptPlugin",
+  "descriptionForModel": "Used to analyze a call given the transcript.",
+  "descriptionForHuman": "Used to analyze a call given the transcript.",
+  "auth": {
+    "type": "none"
+  },
+  "api": {
+    "type": "openapi",
+    "url": "{url}/swagger.json"
+  },
+  "logoUrl": "{url}/logo.png",
+  "contactEmail": "support@example.com",
+  "legalInfoUrl": "http://www.example.com/legal"
+}
\ No newline at end of file
diff --git a/src/OpenAI.Plugin/host.json b/src/OpenAI.Plugin/host.json
new file mode 100644
index 0000000..ee5cf5f
--- /dev/null
+++ b/src/OpenAI.Plugin/host.json
@@ -0,0 +1,12 @@
+{
+  "version": "2.0",
+  "logging": {
+    "applicationInsights": {
+      "samplingSettings": {
+        "isEnabled": true,
+        "excludedTypes": "Request"
+      },
+      "enableLiveMetricsFilters": true
+    }
+  }
+}
\ No newline at end of file