remove tests that depend on legacy model #94

Merged on Jul 23, 2024 (4 commits)
Changes from all commits
@@ -6,6 +6,7 @@
import com.github.tomakehurst.wiremock.junit5.WireMockRuntimeInfo;
import com.github.tomakehurst.wiremock.junit5.WireMockTest;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
import com.microsoft.semantickernel.orchestration.FunctionResult;
import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
@@ -27,13 +28,13 @@ public void main(WireMockRuntimeInfo wmRuntimeInfo) {
.endpoint("http://localhost:" + wmRuntimeInfo.getHttpPort())
.buildAsyncClient();

TextGenerationService textGenerationService = TextGenerationService.builder()
OpenAIChatCompletion chatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId("text-davinci-003")
.withModelId("gpt-35-turbo")
.build();

Kernel kernel = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService)
.withAIService(OpenAIChatCompletion.class, chatCompletion)
.build();

System.out.println("======== Inline Function Definition ========");
@@ -63,7 +64,7 @@ public void main(WireMockRuntimeInfo wmRuntimeInfo) {
.build())
.build();

WireMockUtil.mockCompletionResponse("I missed the F1 final race", "a-response");
WireMockUtil.mockChatCompletionResponse("I missed the F1 final race", "a-response");

var result = kernel.invokeAsync(excuseFunction)
.withArguments(
@@ -74,7 +75,7 @@ public void main(WireMockRuntimeInfo wmRuntimeInfo) {

Assertions.assertEquals("a-response", result.getResult());

WireMockUtil.mockCompletionResponse("sorry I forgot your birthday", "a-response-2");
WireMockUtil.mockChatCompletionResponse("sorry I forgot your birthday", "a-response-2");

result = kernel.invokeAsync(excuseFunction)
.withArguments(
@@ -85,7 +86,7 @@ public void main(WireMockRuntimeInfo wmRuntimeInfo) {

Assertions.assertEquals("a-response-2", result.getResult());

WireMockUtil.mockCompletionResponse("Translate this date ", "a-response-3");
WireMockUtil.mockChatCompletionResponse("Translate this date ", "a-response-3");

var date = DateTimeFormatter.ISO_LOCAL_DATE.withZone(ZoneOffset.UTC)
.format(Instant.ofEpochSecond(1));
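Note: the pattern the updated tests converge on is a chat-completion service registered on the kernel in place of the legacy text-generation service. A minimal standalone sketch of that setup, assembled from the builder calls visible in this diff (the class name, endpoint, and key are placeholders, not part of the change):

import com.azure.ai.openai.OpenAIAsyncClient;
import com.azure.ai.openai.OpenAIClientBuilder;
import com.azure.core.credential.AzureKeyCredential;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;

public class ChatCompletionKernelSketch {
    public static Kernel buildKernel() {
        // Async client pointed at the mocked endpoint; port and key are placeholders.
        OpenAIAsyncClient client = new OpenAIClientBuilder()
            .credential(new AzureKeyCredential("a-key"))
            .endpoint("http://localhost:8080")
            .buildAsyncClient();

        // Chat-completion service takes over from the removed TextGenerationService / text-davinci-003 setup.
        OpenAIChatCompletion chatCompletion = OpenAIChatCompletion.builder()
            .withOpenAIAsyncClient(client)
            .withModelId("gpt-35-turbo")
            .build();

        // Register the service on the kernel, as the updated test does.
        return Kernel.builder()
            .withAIService(OpenAIChatCompletion.class, chatCompletion)
            .build();
    }
}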
@@ -29,7 +29,7 @@ private static Builder getKernelBuilder(WireMockRuntimeInfo wmRuntimeInfo) {
.buildAsyncClient();

ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withModelId("text-davinci-003")
.withModelId("gpt-35-turbo")
.withOpenAIAsyncClient(client)
.build();

@@ -116,7 +116,7 @@ public static void mockChatCompletionResponse(
WireMock.reset();
WireMock.stubFor(WireMock
.post(new UrlPathPattern(
new RegexPattern("/openai/deployments/text-davinci-003/chat/completions"), true))
new RegexPattern("/openai/deployments/gpt-35-turbo/chat/completions"), true))
.withRequestBody(WireMock.matching(".*" + regexMatcher + ".*"))
.willReturn(WireMock.ok()
.withBody(body)));
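Note: the deployment segment in the stubbed path has to stay in sync with the model id the tests now pass to withModelId. A rough standalone equivalent of the stub WireMockUtil sets up (the request matcher text and response body here are illustrative, not the util's actual payload):

import com.github.tomakehurst.wiremock.client.WireMock;

public class ChatCompletionStubSketch {
    // Assumes a WireMock server is already running and configured, as in the @WireMockTest tests above.
    public static void stubChatCompletions() {
        WireMock.stubFor(WireMock
            .post(WireMock.urlPathMatching("/openai/deployments/gpt-35-turbo/chat/completions"))
            .withRequestBody(WireMock.matching(".*F1 final race.*"))
            .willReturn(WireMock.okJson("{\"choices\":[]}")));
    }
}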
@@ -43,13 +43,13 @@ public static void main(String[] args) {
.credential(new KeyCredential("a-key"))
.buildAsyncClient();

TextGenerationService textGenerationService = TextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withModelId("gpt-35-turbo")
.withOpenAIAsyncClient(client2)
.withModelId("text-davinci-003")
.build();

Kernel kernel2 = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService)
.withAIService(ChatCompletionService.class, openAIChatCompletion)
.build();
/////////////////////////////////////////////////////////

@@ -14,7 +14,10 @@
import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example05_InlineFunctionDefinition;
import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example09_FunctionTypes;
import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example27_PromptFunctionsUsingChatGPT;
import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example59_OpenAIFunctionCalling;
import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example60_AdvancedMethodFunctions;
import com.microsoft.semantickernel.samples.syntaxexamples.java.KernelFunctionYaml_Example;
import com.microsoft.semantickernel.samples.syntaxexamples.memory.AzureAISearch;
import com.microsoft.semantickernel.samples.syntaxexamples.plugins.Example10_DescribeAllPluginsAndFunctions;
import com.microsoft.semantickernel.samples.syntaxexamples.plugins.Example13_ConversationSummaryPlugin;
import com.microsoft.semantickernel.samples.syntaxexamples.template.Example06_TemplateLanguage;
@@ -35,35 +38,35 @@ public class RunAll {

public static void main(String[] args) {
List<MainMethod> mains = Arrays.asList(
AzureAISearch::main,
Example01_NativeFunctions::main,
Example03_Arguments::main,
Example05_InlineFunctionDefinition::main,
Example06_TemplateLanguage::main,
Example08_RetryHandler::main,
Example09_FunctionTypes::main,
Example10_DescribeAllPluginsAndFunctions::main,
//Example11_WebSearchQueries::main,
Example13_ConversationSummaryPlugin::main,
Example17_ChatGPT::main,
//Example26_AADAuth::main,

Example27_PromptFunctionsUsingChatGPT::main,
Example30_ChatWithPrompts::main,
Example33_Chat::main,
Example41_HttpClientUsage::main,
Example42_KernelBuilder::main,
Example43_GetModelResult::main,
Example44_MultiChatCompletion::main,
Example49_LogitBias::main,
Example55_TextChunker::main,
Example56_TemplateMethodFunctionsWithMultipleArguments::main,
Example57_KernelHooks::main,
Example58_ConfigureExecutionSettings::main,
Example59_OpenAIFunctionCalling::main,
Example60_AdvancedMethodFunctions::main,
Example61_MultipleLLMs::main,
Example62_CustomAIServiceSelector::main,
Example63_ChatCompletionPrompts::main,
Example64_MultiplePromptTemplates::main,
Example69_MutableKernelPlugin::main);
Example69_MutableKernelPlugin::main,
KernelFunctionYaml_Example::main);

Scanner scanner = new Scanner(System.in);
mains.forEach(mainMethod -> {
@@ -22,8 +22,6 @@ public class Example82_Audio {

// Only required if AZURE_CLIENT_KEY is set
private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
private static final String MODEL_ID = System.getenv()
.getOrDefault("MODEL_ID", "gpt-35-turbo");

private static final String TextToAudioModel = "tts-1";
private static final String AudioToTextModel = "whisper-1";
@@ -7,16 +7,18 @@
import com.azure.core.http.policy.ExponentialBackoffOptions;
import com.azure.core.http.policy.RetryOptions;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
import com.microsoft.semantickernel.exceptions.ConfigurationException;
import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionFromPrompt;
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
import java.time.Duration;

public class Example08_RetryHandler {

private static final String MODEL_ID = System.getenv()
.getOrDefault("MODEL_ID", "text-davinci-003");
.getOrDefault("MODEL_ID", "gpt-35-turbo");

public static void main(String[] args) throws ConfigurationException {
// Create a Kernel with the HttpClient
@@ -31,13 +33,13 @@ public static void main(String[] args) throws ConfigurationException {
.credential(new AzureKeyCredential("BAD KEY"))
.buildAsyncClient();

TextGenerationService textGenerationService = TextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId(MODEL_ID)
.build();

Kernel kernel = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService)
.withAIService(ChatCompletionService.class, openAIChatCompletion)
.build();

String question = "How popular is the Polly library?";
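Note: the RetryOptions / ExponentialBackoffOptions imports in this example suggest the retry policy is attached on the client builder. A sketch of one way that wiring could look, assuming the builder exposes retryOptions as other Azure client builders do (the class name and delay values are illustrative):

import com.azure.ai.openai.OpenAIAsyncClient;
import com.azure.ai.openai.OpenAIClientBuilder;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.http.policy.ExponentialBackoffOptions;
import com.azure.core.http.policy.RetryOptions;
import java.time.Duration;

public class RetryClientSketch {
    public static OpenAIAsyncClient buildClient() {
        // Exponential backoff with a few attempts; values are illustrative only.
        RetryOptions retryOptions = new RetryOptions(
            new ExponentialBackoffOptions()
                .setMaxRetries(3)
                .setBaseDelay(Duration.ofSeconds(2))
                .setMaxDelay(Duration.ofSeconds(16)));

        // retryOptions(...) is assumed from the common Azure client builder surface;
        // the sample itself only shows the credential and buildAsyncClient calls.
        return new OpenAIClientBuilder()
            .retryOptions(retryOptions)
            .credential(new AzureKeyCredential("BAD KEY"))
            .buildAsyncClient();
    }
}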
@@ -47,13 +47,13 @@ private static void useCustomHttpClient() {
.credential(new AzureKeyCredential("BAD KEY"))
.buildAsyncClient();

TextGenerationService textGenerationService = TextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId("text-davinci-003")
.withModelId("gpt-35-turbo")
.build();

Kernel kernel = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService)
.withAIService(ChatCompletionService.class, openAIChatCompletion)
.build();
}
}
@@ -6,12 +6,14 @@
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.credential.KeyCredential;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
import com.microsoft.semantickernel.exceptions.ConfigurationException;
import com.microsoft.semantickernel.orchestration.FunctionResult;
import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionFromPrompt;
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
import java.time.Instant;
import java.time.ZoneOffset;
@@ -25,7 +27,7 @@ public class Example05_InlineFunctionDefinition {
// Only required if AZURE_CLIENT_KEY is set
private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
private static final String MODEL_ID = System.getenv()
.getOrDefault("MODEL_ID", "text-davinci-003");
.getOrDefault("MODEL_ID", "gpt-35-turbo");

public static void main(String[] args) throws ConfigurationException {

@@ -42,13 +44,13 @@ public static void main(String[] args) throws ConfigurationException {
.buildAsyncClient();
}

TextGenerationService textGenerationService = TextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId(MODEL_ID)
.build();

Kernel kernel = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService)
.withAIService(ChatCompletionService.class, openAIChatCompletion)
.build();

System.out.println("======== Inline Function Definition ========");
@@ -8,6 +8,7 @@
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.credential.KeyCredential;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
import com.microsoft.semantickernel.aiservices.openai.textcompletion.OpenAITextGenerationService;
import com.microsoft.semantickernel.contextvariables.ContextVariable;
import com.microsoft.semantickernel.contextvariables.ContextVariableType;
@@ -19,6 +20,7 @@
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
import java.nio.file.Path;
import java.time.Instant;
@@ -42,7 +44,7 @@ public class Example09_FunctionTypes {
// Only required if AZURE_CLIENT_KEY is set
private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
private static final String MODEL_ID = System.getenv()
.getOrDefault("MODEL_ID", "text-davinci-003");
.getOrDefault("MODEL_ID", "gpt-35-turbo");

public static void main(String[] args) throws InterruptedException {

@@ -61,7 +63,7 @@ public static void main(String[] args) throws InterruptedException {
.buildAsyncClient();
}

TextGenerationService textGenerationService = OpenAITextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId(MODEL_ID)
.build();
@@ -86,7 +88,7 @@ public static void main(String[] args) throws InterruptedException {
Example09_FunctionTypes.class);

Kernel kernel = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService)
.withAIService(ChatCompletionService.class, openAIChatCompletion)
.withPlugin(plugin)
.withPlugin(summarize)
.withPlugin(examplePlugin)
@@ -7,12 +7,14 @@
import com.azure.core.credential.KeyCredential;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.Kernel.Builder;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
import com.microsoft.semantickernel.exceptions.ConfigurationException;
import com.microsoft.semantickernel.implementation.EmbeddedResourceLoader;
import com.microsoft.semantickernel.orchestration.FunctionResult;
import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionYaml;
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
import java.io.IOException;

@@ -24,7 +26,7 @@ public class KernelFunctionYaml_Example {
// Only required if AZURE_CLIENT_KEY is set
private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
private static final String MODEL_ID = System.getenv()
.getOrDefault("MODEL_ID", "text-davinci-003");
.getOrDefault("MODEL_ID", "gpt-35-turbo");

public static void main(String[] args) throws ConfigurationException, IOException {

@@ -41,13 +43,13 @@ public static void main(String[] args) throws ConfigurationException, IOException {
.buildAsyncClient();
}

TextGenerationService textGenerationService = TextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId(MODEL_ID)
.build();

Builder kernelBuilder = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService);
.withAIService(ChatCompletionService.class, openAIChatCompletion);

semanticKernelTemplate(kernelBuilder.build());
handlebarsTemplate(kernelBuilder.build());
@@ -116,7 +116,7 @@ public static void documentSearchWithAzureAISearch(

// Create a new Azure AI Search vector store
var azureAISearchVectorStore = new AzureAISearchVectorStore<>(searchClient,
AzureAISearchVectorStoreOptions.<GitHubFile>builder()
AzureAISearchVectorStoreOptions.<GitHubFile>builder()
.withRecordClass(GitHubFile.class)
.build());

@@ -10,6 +10,7 @@
import com.azure.core.credential.KeyCredential;
import com.microsoft.semantickernel.Kernel;
import com.microsoft.semantickernel.Kernel.Builder;
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
import com.microsoft.semantickernel.aiservices.openai.textcompletion.OpenAITextGenerationService;
import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
import com.microsoft.semantickernel.plugin.KernelPluginFactory;
@@ -19,6 +20,7 @@
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionMetadata;
import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;

public class Example10_DescribeAllPluginsAndFunctions {
@@ -31,7 +33,7 @@ public class Example10_DescribeAllPluginsAndFunctions {
// Only required if AZURE_CLIENT_KEY is set
private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
private static final String MODEL_ID = System.getenv()
.getOrDefault("MODEL_ID", "text-davinci-003");
.getOrDefault("MODEL_ID", "gpt-35-turbo");

/// <summary>
/// Print a list of all the functions imported into the kernel, including function descriptions,
@@ -53,13 +55,13 @@ public static void main(String[] args) {
.buildAsyncClient();
}

TextGenerationService textGenerationService = OpenAITextGenerationService.builder()
ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
.withOpenAIAsyncClient(client)
.withModelId(MODEL_ID)
.build();

Builder kernelBuilder = Kernel.builder()
.withAIService(TextGenerationService.class, textGenerationService);
.withAIService(ChatCompletionService.class, openAIChatCompletion);

kernelBuilder.withPlugin(
KernelPluginFactory.createFromObject(