Skip to content
This repository was archived by the owner on Feb 11, 2025. It is now read-only.

Commit f193c92

Browse files
Authored by: LangChain4j
Enable CI, fix LogLevel NPE (#12)
1 parent db1b821 commit f193c92

File tree

16 files changed

+125
-29
lines changed

16 files changed

+125
-29
lines changed

.github/workflows/main.yaml

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
name: Java CI
2+
3+
on:
4+
push:
5+
branches:
6+
- main
7+
pull_request:
8+
branches:
9+
- main
10+
11+
jobs:
12+
java_build:
13+
strategy:
14+
matrix:
15+
java_version: [ 8, 11, 17, 21 ]
16+
runs-on: ubuntu-latest
17+
steps:
18+
- uses: actions/checkout@v4
19+
- name: Set up JDK ${{ matrix.java_version }}
20+
uses: actions/setup-java@v4
21+
with:
22+
java-version: ${{ matrix.java_version }}
23+
distribution: 'temurin'
24+
cache: 'maven'
25+
- name: Build with JDK ${{ matrix.java_version }}
26+
run: mvn -B clean test ${{ matrix.included_modules }}
27+
env:
28+
OPENAI_BASE_URL: 'http://langchain4j.dev:8082/v1'
29+
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

.github/workflows/release.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ jobs:
2424
gpg-passphrase: GPG_PASSPHRASE
2525

2626
- name: release
27-
run: mvn -B clean deploy -Psign -DskipTests
27+
run: mvn -B clean deploy -Psign
2828
env:
2929
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
3030
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}

README.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ Customizable way:
6464
String apiKey = System.getenv("OPENAI_API_KEY");
6565
6666
OpenAiClient client = OpenAiClient.builder()
67+
.baseUrl(baseUrl)
6768
.openAiApiKey(apiKey)
6869
.organizationId(orgId)
6970
.callTimeout(ofSeconds(60))
@@ -91,7 +92,7 @@ Customizable way:
9192

9293
```
9394
CompletionRequest request = CompletionRequest.builder()
94-
.model(TEXT_DAVINCI_003)
95+
.model(GPT_3_5_TURBO_INSTRUCT)
9596
.prompt("Write a poem about ChatGPT")
9697
.temperature(0.9)
9798
...
@@ -115,7 +116,7 @@ Customizable way:
115116

116117
```
117118
CompletionRequest request = CompletionRequest.builder()
118-
.model(TEXT_DAVINCI_003)
119+
.model(GPT_3_5_TURBO_INSTRUCT)
119120
.prompt("Write a poem about ChatGPT")
120121
.temperature(0.9)
121122
...
@@ -143,7 +144,7 @@ Customizable way:
143144

144145
```
145146
CompletionRequest request = CompletionRequest.builder()
146-
.model(TEXT_DAVINCI_003)
147+
.model(GPT_3_5_TURBO_INSTRUCT)
147148
.prompt("Write a poem about ChatGPT")
148149
.temperature(0.9)
149150
...

src/main/java/dev/ai4j/openai4j/OpenAiClient.java

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@
2121
import dev.ai4j.openai4j.spi.OpenAiClientBuilderFactory;
2222
import dev.ai4j.openai4j.spi.ServiceHelper;
2323

24+
import static dev.ai4j.openai4j.LogLevel.DEBUG;
25+
2426
public abstract class OpenAiClient {
2527

2628
public abstract SyncOrAsyncOrStreaming<CompletionResponse> completion(CompletionRequest request);
@@ -67,7 +69,7 @@ public abstract static class Builder<T extends OpenAiClient, B extends Builder<T
6769
public Proxy proxy;
6870
public boolean logRequests;
6971
public boolean logResponses;
70-
public LogLevel logLevel;
72+
public LogLevel logLevel = DEBUG;
7173
public boolean logStreamingResponses;
7274
public Path persistTo;
7375

@@ -190,7 +192,7 @@ public B logRequests(Boolean logRequests) {
190192

191193
public B logLevel(LogLevel logLevel) {
192194
if (logLevel == null) {
193-
logLevel = LogLevel.DEBUG;
195+
logLevel = DEBUG;
194196
}
195197
this.logLevel = logLevel;
196198
return (B) this;

src/main/java/dev/ai4j/openai4j/completion/CompletionModel.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
public enum CompletionModel {
44

5-
TEXT_DAVINCI_003("text-davinci-003");
5+
GPT_3_5_TURBO_INSTRUCT("gpt-3.5-turbo-instruct");
66

77
private final String value;
88

src/main/java/dev/ai4j/openai4j/completion/CompletionRequest.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import java.util.Map;
55
import java.util.Objects;
66

7-
import static dev.ai4j.openai4j.completion.CompletionModel.TEXT_DAVINCI_003;
7+
import static dev.ai4j.openai4j.completion.CompletionModel.GPT_3_5_TURBO_INSTRUCT;
88
import static java.util.Collections.unmodifiableList;
99
import static java.util.Collections.unmodifiableMap;
1010

@@ -186,7 +186,7 @@ public static Builder builder() {
186186

187187
public static final class Builder {
188188

189-
private String model = TEXT_DAVINCI_003.toString();
189+
private String model = GPT_3_5_TURBO_INSTRUCT.toString();
190190
private String prompt;
191191
private String suffix;
192192
private Integer maxTokens;

src/test/java/dev/ai4j/openai4j/chat/ChatCompletionAsyncTest.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
class ChatCompletionAsyncTest extends RateLimitAwareTest {
2626

2727
private final OpenAiClient client = OpenAiClient.builder()
28+
.baseUrl(System.getenv("OPENAI_BASE_URL"))
2829
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
2930
.logRequests()
3031
.logResponses()

src/test/java/dev/ai4j/openai4j/chat/ChatCompletionStreamingTest.java

Lines changed: 38 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
class ChatCompletionStreamingTest extends RateLimitAwareTest {
2929

3030
private final OpenAiClient client = OpenAiClient.builder()
31+
.baseUrl(System.getenv("OPENAI_BASE_URL"))
3132
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
3233
.logRequests()
3334
.logResponses()
@@ -754,34 +755,51 @@ void testGpt4Vision() throws Exception {
754755
@Test
755756
void testCancelStreamingAfterStreamingStarted() throws Exception {
756757

758+
OpenAiClient client = OpenAiClient.builder()
759+
// without caching
760+
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
761+
.logRequests()
762+
.logResponses()
763+
.logStreamingResponses()
764+
.build();
765+
757766
AtomicBoolean streamingStarted = new AtomicBoolean(false);
758767
AtomicBoolean streamingCancelled = new AtomicBoolean(false);
759768
AtomicBoolean cancellationSucceeded = new AtomicBoolean(true);
760769

761770
ResponseHandle responseHandle = client.chatCompletion("Write a poem about AI in 10 words")
762771
.onPartialResponse(partialResponse -> {
763772
streamingStarted.set(true);
773+
System.out.println("[[streaming started]]");
764774
if (streamingCancelled.get()) {
765775
cancellationSucceeded.set(false);
776+
System.out.println("[[cancellation failed]]");
766777
}
767778
})
768-
.onComplete(() -> cancellationSucceeded.set(false))
769-
.onError(e -> cancellationSucceeded.set(false))
779+
.onComplete(() -> {
780+
cancellationSucceeded.set(false);
781+
System.out.println("[[cancellation failed]]");
782+
})
783+
.onError(e -> {
784+
cancellationSucceeded.set(false);
785+
System.out.println("[[cancellation failed]]");
786+
})
770787
.execute();
771788

772789
while (!streamingStarted.get()) {
773-
Thread.sleep(200);
790+
Thread.sleep(10);
774791
}
775792

776793
newSingleThreadExecutor().execute(() -> {
777794
responseHandle.cancel();
778795
streamingCancelled.set(true);
796+
System.out.println("[[streaming cancelled]]");
779797
});
780798

781799
while (!streamingCancelled.get()) {
782-
Thread.sleep(200);
800+
Thread.sleep(10);
783801
}
784-
Thread.sleep(5000);
802+
Thread.sleep(2000);
785803

786804
assertThat(cancellationSucceeded).isTrue();
787805
}
@@ -792,22 +810,32 @@ void testCancelStreamingBeforeStreamingStarted() throws Exception {
792810
AtomicBoolean cancellationSucceeded = new AtomicBoolean(true);
793811

794812
ResponseHandle responseHandle = client.chatCompletion("Write a poem about AI in 10 words")
795-
.onPartialResponse(partialResponse -> cancellationSucceeded.set(false))
796-
.onComplete(() -> cancellationSucceeded.set(false))
797-
.onError(e -> cancellationSucceeded.set(false))
813+
.onPartialResponse(partialResponse -> {
814+
cancellationSucceeded.set(false);
815+
System.out.println("[[cancellation failed]]");
816+
})
817+
.onComplete(() -> {
818+
cancellationSucceeded.set(false);
819+
System.out.println("[[cancellation failed]]");
820+
})
821+
.onError(e -> {
822+
cancellationSucceeded.set(false);
823+
System.out.println("[[cancellation failed]]");
824+
})
798825
.execute();
799826

800827
AtomicBoolean streamingCancelled = new AtomicBoolean(false);
801828

802829
newSingleThreadExecutor().execute(() -> {
803830
responseHandle.cancel();
804831
streamingCancelled.set(true);
832+
System.out.println("[[streaming cancelled]]");
805833
});
806834

807835
while (!streamingCancelled.get()) {
808-
Thread.sleep(200);
836+
Thread.sleep(10);
809837
}
810-
Thread.sleep(5000);
838+
Thread.sleep(2000);
811839

812840
assertThat(cancellationSucceeded).isTrue();
813841
}

src/test/java/dev/ai4j/openai4j/chat/ChatCompletionTest.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ class ChatCompletionTest extends RateLimitAwareTest {
3535
static final Tool WEATHER_TOOL = Tool.from(WEATHER_FUNCTION);
3636

3737
private final OpenAiClient client = OpenAiClient.builder()
38+
.baseUrl(System.getenv("OPENAI_BASE_URL"))
3839
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
3940
.logRequests()
4041
.logResponses()

src/test/java/dev/ai4j/openai4j/completion/CompletionAsyncTest.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ class CompletionAsyncTest extends RateLimitAwareTest {
1414
private static final String PROMPT = "write exactly the following 2 words: 'hello world'";
1515

1616
private final OpenAiClient client = OpenAiClient.builder()
17+
.baseUrl(System.getenv("OPENAI_BASE_URL"))
1718
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
1819
.logRequests()
1920
.logResponses()

0 commit comments

Comments (0)