diff --git a/.github/workflows/pre-release-tests.yml b/.github/workflows/pre-release-tests.yml
new file mode 100644
index 00000000000..2325863da39
--- /dev/null
+++ b/.github/workflows/pre-release-tests.yml
@@ -0,0 +1,27 @@
+name: Pre-release Tests
+
+on:
+ pull_request:
+ branches:
+ - main
+ push:
+ branches:
+ - main
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ if: github.ref_name == 'release-please--branches--main' || github.head_ref == 'release-please--branches--main'
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-java@v4
+ with:
+ distribution: 'temurin'
+ java-version: '21'
+ cache: 'maven'
+ - name: Install Root
+ run: mvn -B -ntp install -DskipTests -Djacoco.skip=true -Dclirr.skip=true
+ - name: Compile Examples
+ run: mvn -B -ntp compile -f examples/pom.xml
+ - name: Generate Javadoc
+ run: mvn -B -ntp package -DperformRelease=true -DskipTests -Djacoco.skip=true -Dclirr.skip=true
\ No newline at end of file
diff --git a/.github/workflows/publish-javadoc.yml b/.github/workflows/publish-javadoc.yml
index 2c763bfb2cf..7c18f72053d 100644
--- a/.github/workflows/publish-javadoc.yml
+++ b/.github/workflows/publish-javadoc.yml
@@ -20,4 +20,4 @@ jobs:
java-version: 17
target-folder: javadoc
project: maven
- custom-command: mvn javadoc:javadoc
\ No newline at end of file
+ custom-command: mvn dokka:javadoc
\ No newline at end of file
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
new file mode 100644
index 00000000000..5fdcb436f99
--- /dev/null
+++ b/.github/workflows/stale.yml
@@ -0,0 +1,33 @@
+# This workflow warns and then closes issues that have had no activity for a specified amount of time.
+name: Mark and close stale issues
+
+on:
+ schedule:
+  # Scheduled to run at 1:30 UTC every day
+ - cron: '30 1 * * *'
+
+jobs:
+ stale:
+
+ runs-on: ubuntu-latest
+ permissions:
+ issues: write
+
+ steps:
+ - uses: actions/stale@v5
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ days-before-issue-stale: 7
+ days-before-issue-close: 2
+ stale-issue-label: "status:stale"
+ close-issue-reason: not_planned
+ any-of-labels: "status:awaiting user response"
+ remove-stale-when-updated: true
+ labels-to-remove-when-unstale: 'status:awaiting user response,status:stale'
+ stale-issue-message: >
+ This issue has been marked as stale because it has been open for 7 days with no activity. It will be closed in 2 days if no further activity occurs.
+ close-issue-message: >
+ This issue was closed because it has been inactive for 9 days.
+ Please post a new issue if you need further assistance. Thanks!
+ # Label that can be assigned to issues to exclude them from being marked as stale
+ exempt-issue-labels: 'override-stale'
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
index 7ffb0c9a55c..d2fd993b39c 100644
--- a/.github/workflows/unit-tests.yml
+++ b/.github/workflows/unit-tests.yml
@@ -14,10 +14,9 @@ jobs:
strategy:
matrix:
java: [8, 11, 17, 21]
- testgroup: [
- # Includes everything in tests/ except the ones listed here
- '**/*,!com.google.genai.ClientTest,!com.google.genai.TableTest,!com.google.genai.HttpApiClientTest,!com.google.genai.ModelsTest,!com.google.genai.OperationsTest'
- ]
+ # Only run unit tests.
+ testgroup: ["**/*Test"]
+ fail-fast: false
name: unit-test (${{matrix.java}})
steps:
- uses: actions/checkout@v4
@@ -27,6 +26,4 @@ jobs:
java-version: ${{matrix.java}}
cache: 'maven'
- name: Java Unit Tests
- run: mvn clean test -Dtest=${{matrix.testgroup}}
- env:
- GOOGLE_API_KEY: testkey
+ run: mvn clean test -Dtest=${{matrix.testgroup}} -Djacoco.skip=true
\ No newline at end of file
diff --git a/.github/workflows/validation-java-version.yml b/.github/workflows/validation-java-version.yml
deleted file mode 100644
index 9c62d4b2b35..00000000000
--- a/.github/workflows/validation-java-version.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-# This workflow runs a simple test to verify we are supporing the required Java versions.
-name: validation
-
-on:
- pull_request:
- branches:
- - main
-
-jobs:
- java-version:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- java-version: ['8', '11', '17', '21']
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Set up Java ${{ matrix.java-version }}
- uses: actions/setup-java@v4
- with:
- distribution: temurin
- java-version: ${{ matrix.java-version }}
-
- - name: Compile ${{ matrix.java-version }}
- run: mvn clean compile
\ No newline at end of file
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index fbd9082d716..0b97f7533e3 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "1.5.0"
+ ".": "1.53.0"
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0dc8aaed03a..541c7639398 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,624 @@
# Changelog
+## [1.53.0](https://github.com/googleapis/java-genai/compare/v1.52.0...v1.53.0) (2026-05-05)
+
+
+### Features
+
+* [Python] Multimodal file search ([b12cd29](https://github.com/googleapis/java-genai/commit/b12cd29933be12951f159052ec519480f6559fe3))
+* Multimodal file search ([aaf0c40](https://github.com/googleapis/java-genai/commit/aaf0c40d15cc9c8a128d75c7fa71c6f9fb6ea480))
+
+## [1.52.0](https://github.com/googleapis/java-genai/compare/v1.51.0...v1.52.0) (2026-04-30)
+
+
+### Features
+
+* [Interactions] Add FileCitation.{custom_metadata,media_id,page_number} ([a09716e](https://github.com/googleapis/java-genai/commit/a09716e170ff952c7587dc8a16c06852219c725e))
+* Add `output_info` to `BatchJob` ([e337ba9](https://github.com/googleapis/java-genai/commit/e337ba96df0ce919fe90481c2324e7c5505a2101))
+* Add ImageResizeMode for GenerateVideos ([e089fcb](https://github.com/googleapis/java-genai/commit/e089fcb3d99a03cc1ecb7786b8ef58966c25ac4d))
+* Add new Gemini Deep Research agent models ([88d7019](https://github.com/googleapis/java-genai/commit/88d7019fee1be2445c070b3d2fdc23f891764fb0))
+* Add Vertex Dataset input and output options for batch jobs ([612601d](https://github.com/googleapis/java-genai/commit/612601da1c5b26f7650bc148124cb568ee52af67))
+* **interaction-api:** Add grounding tool usage breakdown to Interaction Usage. ([bc2f815](https://github.com/googleapis/java-genai/commit/bc2f815b8bd9a9a4f439ca2619fb9208fe3867bd))
+* introduce enterprise and GOOGLE_GENAI_USE_ENTERPRISE ([97adcd9](https://github.com/googleapis/java-genai/commit/97adcd91cbe9b7c13ed6dfdf476d709b99804572))
+* Replace the more ambiguous rate field with sample_rate. ([d762b6e](https://github.com/googleapis/java-genai/commit/d762b6e2fd91e11201dfc84d856fbd2a8e657951))
+
+
+### Documentation
+
+* replace Vertex AI with Gemini Enterprise Agent Platform ([84c1d43](https://github.com/googleapis/java-genai/commit/84c1d4328fd3827e26f5d1a2367435fa6ab12a35))
+* update doc string to replace `Vertex AI` with `Gemini Enterprise Agent Platform`, update method error message to replace `Vertex AI` with `Gemini Enterprise Agent Platform (previously known as Vertex AI)`, update converter error message to replace `Vertex AI` with `Gemini Enterprise Agent Platform` ([c5aefa7](https://github.com/googleapis/java-genai/commit/c5aefa754b75330d018993866d798c3dec23a19a))
+* update Gemini Enterprise Agent Platform home page url ([7b46a6f](https://github.com/googleapis/java-genai/commit/7b46a6fdd08a6a80dc2f2bfe8b194264fca2191f))
+
+## [1.51.0](https://github.com/googleapis/java-genai/compare/v1.50.0...v1.51.0) (2026-04-16)
+
+
+### Features
+
+* Add gemini-3.1-flash-tts-preview model to options ([d9a5733](https://github.com/googleapis/java-genai/commit/d9a573332ebefdf385a088360bf3eb5cc889a5a2))
+
+
+### Bug Fixes
+
+* ResponseStream fails to parse error message and multi-line SSE data payloads ([3a1cb22](https://github.com/googleapis/java-genai/commit/3a1cb226bc31f5528a56a2d9fd62fcfe89ca77e1))
+
+
+## [1.50.0](https://github.com/googleapis/java-genai/compare/v1.49.0...v1.50.0) (2026-04-14)
+
+
+### Features
+
+* [Experimental] Add interactions support. ([abc1ee4](https://github.com/googleapis/java-genai/commit/abc1ee4b47c1af5fe7da9f98ce64fe4c7e3fcec7))
+* Add "eu" as a supported service location for Vertex AI platform. ([78e1bc1](https://github.com/googleapis/java-genai/commit/78e1bc19d154ba5dad4e4986a9d6e7f8a6c585b8))
+* Add Live Avatar new fields ([8e8146a](https://github.com/googleapis/java-genai/commit/8e8146a3a8d6d37e8fe691a6f96e7fd2b055d988))
+* Add webhook_config to batches.create() and models.generate_videos() ([ac0e49e](https://github.com/googleapis/java-genai/commit/ac0e49e187ecbda2b58d10e60161b3ab6fd24667))
+
+### Bug Fixes
+
+* fix broken javadoc by switching to dokka. ([4b76baf](https://github.com/googleapis/java-genai/commit/4b76baf1c1f6f8d802169fc138b90098c416bf42))
+* internal workflow change ([b80b142](https://github.com/googleapis/java-genai/commit/b80b1425359952ad1db25a3b5e5606cf5e6c6b5e))
+
+## [1.49.0](https://github.com/googleapis/java-genai/compare/v1.48.0...v1.49.0) (2026-04-14)
+
+
+### Bug Fixes
+
+* fix broken javadoc by switching to dokka. ([4b76baf](https://github.com/googleapis/java-genai/commit/4b76baf1c1f6f8d802169fc138b90098c416bf42))
+* internal workflow change ([b80b142](https://github.com/googleapis/java-genai/commit/b80b1425359952ad1db25a3b5e5606cf5e6c6b5e))
+
+## [1.48.0](https://github.com/googleapis/java-genai/compare/v1.47.0...v1.48.0) (2026-04-13)
+
+
+### Features
+
+* [Experimental] Add interactions support. ([abc1ee4](https://github.com/googleapis/java-genai/commit/abc1ee4b47c1af5fe7da9f98ce64fe4c7e3fcec7))
+* Add "eu" as a supported service location for Vertex AI platform. ([78e1bc1](https://github.com/googleapis/java-genai/commit/78e1bc19d154ba5dad4e4986a9d6e7f8a6c585b8))
+* Add Live Avatar new fields ([8e8146a](https://github.com/googleapis/java-genai/commit/8e8146a3a8d6d37e8fe691a6f96e7fd2b055d988))
+* Add webhook_config to batches.create() and models.generate_videos() ([ac0e49e](https://github.com/googleapis/java-genai/commit/ac0e49e187ecbda2b58d10e60161b3ab6fd24667))
+
+## [1.47.0](https://github.com/googleapis/java-genai/compare/v1.46.0...v1.47.0) (2026-04-08)
+
+
+### Documentation
+
+* Remove deprecated product recontext model samples from docstrings ([6f73c4a](https://github.com/googleapis/java-genai/commit/6f73c4a996f5ccdc1f2b73000c849cadcdeb8a62))
+
+## [1.46.0](https://github.com/googleapis/java-genai/compare/v1.45.0...v1.46.0) (2026-04-01)
+
+
+### Bug Fixes
+
+* Fix service_tier enums. ([d4641e4](https://github.com/googleapis/java-genai/commit/d4641e422c4dff90fc6a62cdbbe3e0d634b64740))
+
+## [1.45.0](https://github.com/googleapis/java-genai/compare/v1.44.0...v1.45.0) (2026-03-28)
+
+
+### Features
+
+* Add consent_audio and voice_consent_signature and AsyncSession.setup_complete ([959c01b](https://github.com/googleapis/java-genai/commit/959c01bb6ea75d4a498a6781f52d8741f3bade94))
+* Add labels field to Veo configs ([86f235a](https://github.com/googleapis/java-genai/commit/86f235af5751234c433236b93c92dc1d05e479bc))
+* Add model_status to GenerateContentResponse (Gemini API only) ([6c35249](https://github.com/googleapis/java-genai/commit/6c352493d503fb98e7738af3ea88ad7556b19a10))
+* Add part_metadata in Part (Gemini API only) ([6c35249](https://github.com/googleapis/java-genai/commit/6c352493d503fb98e7738af3ea88ad7556b19a10))
+* Add service tier to GenerateContent. ([5f61a16](https://github.com/googleapis/java-genai/commit/5f61a168d697588c2878461976cb31912cee9b3c))
+* **genai:** add TURN_INCLUDES_AUDIO_ACTIVITY_AND_ALL_VIDEO to TurnCoverage ([dbc4c33](https://github.com/googleapis/java-genai/commit/dbc4c33887d8d66cca0e474fde5faa8e98e13802))
+* support hyperparameters in distillation tuning ([fdedc74](https://github.com/googleapis/java-genai/commit/fdedc744dc3d447e94859c59379748286066353c))
+* Support rendered_parts in GroundingSupport ([6c35249](https://github.com/googleapis/java-genai/commit/6c352493d503fb98e7738af3ea88ad7556b19a10))
+
+
+### Bug Fixes
+
+* support us region routing ([3296bfe](https://github.com/googleapis/java-genai/commit/3296bfe392dc685a499a3c1c8477b23674089d77))
+
+## [1.44.0](https://github.com/googleapis/java-genai/compare/v1.43.0...v1.44.0) (2026-03-18)
+
+
+### Features
+
+* Support include_server_side_tool_invocations for genai. ([d92fdb1](https://github.com/googleapis/java-genai/commit/d92fdb167b1c06ae0dc3cbe8ad0c4c903a29f2ee))
+
+## [1.43.0](https://github.com/googleapis/java-genai/compare/v1.42.0...v1.43.0) (2026-03-12)
+
+
+### Features
+
+* Add inference_generation_config to EvaluationConfig for Tuning ([c9632f0](https://github.com/googleapis/java-genai/commit/c9632f024d80f0ede17b6a4df423c33cd09c6fa9))
+* Allow custom endpoints for authentication with Vertex AI in Java ([5b38728](https://github.com/googleapis/java-genai/commit/5b3872897a68d00f536b94bb11bb580c45e8cd55))
+* enable language code for audio transcription config in Live API for Vertex AI ([fb034ff](https://github.com/googleapis/java-genai/commit/fb034ff044d3c73721f3253845ac5e2c00e8d262))
+
+## [1.42.0](https://github.com/googleapis/java-genai/compare/v1.41.0...v1.42.0) (2026-03-04)
+
+
+### Features
+
+* Update data types from discovery doc. ([850d527](https://github.com/googleapis/java-genai/commit/850d527dd945a040c2aa9a50f82060c53fde4d64))
+
+
+### Bug Fixes
+
+* initialize comprehensive and curated history in constructor ([f74a426](https://github.com/googleapis/java-genai/commit/f74a426adcf282f726a49b38958c4df3982a2d0d))
+
+## [1.41.0](https://github.com/googleapis/java-genai/compare/v1.40.0...v1.41.0) (2026-02-26)
+
+
+### Features
+
+* Add Image Grounding support to GoogleSearch tool ([0daefbc](https://github.com/googleapis/java-genai/commit/0daefbc3ea09a341162ff95b68bf7f2e25fa41ea))
+* enable server side MCP and disable all other AFC when server side MCP is configured. ([498a2c4](https://github.com/googleapis/java-genai/commit/498a2c422dabb493d9b3b133b88156e5b44abae0))
+* examples ([917aee0](https://github.com/googleapis/java-genai/commit/917aee09737fec695fdb4fb665e6aaebfcf0a2fc))
+* initial integration ([917aee0](https://github.com/googleapis/java-genai/commit/917aee09737fec695fdb4fb665e6aaebfcf0a2fc))
+* set up pom + fix test ([917aee0](https://github.com/googleapis/java-genai/commit/917aee09737fec695fdb4fb665e6aaebfcf0a2fc))
+
+
+### Bug Fixes
+
+* use `NonClosingDelegatingHttpClient` ([917aee0](https://github.com/googleapis/java-genai/commit/917aee09737fec695fdb4fb665e6aaebfcf0a2fc))
+
+
+### Documentation
+
+* explain how to run tests ([4f6a811](https://github.com/googleapis/java-genai/commit/4f6a8112e8513bed34a5d35a16f21459006944c3))
+
+## [1.40.0](https://github.com/googleapis/java-genai/compare/v1.39.0...v1.40.0) (2026-02-19)
+
+
+### Features
+
+* Add `registerFiles` for Java. ([ce0b638](https://github.com/googleapis/java-genai/commit/ce0b6389330762ba649d25fc40d52b926232e7d1))
+* Add UnifiedMetric support to Vertex Tuning evaluation config ([a28ebfc](https://github.com/googleapis/java-genai/commit/a28ebfc3ddff73b9c998015a0d6d78bb6171b2aa))
+* Support multimodal embedding for Gemini Embedding 2.0 and support MaaS models in Models.embed_content() (Vertex AI API) ([23a7913](https://github.com/googleapis/java-genai/commit/23a7913309416b09e4d8fb39d704b5dc26aa22f1))
+
+## [1.39.0](https://github.com/googleapis/java-genai/compare/v1.38.0...v1.39.0) (2026-02-07)
+
+
+### Features
+
+* Support encryption_spec in tuning job creation configuration for GenAI SDK ([7a4fb51](https://github.com/googleapis/java-genai/commit/7a4fb51127dd9cba8e32587866ae03608505b2bd))
+
+## [1.38.0](https://github.com/googleapis/java-genai/compare/v1.37.0...v1.38.0) (2026-02-05)
+
+
+### Features
+
+
+### Bug Fixes
+
+* 'No SLF4J providers were found' on examples ([97fa11f](https://github.com/googleapis/java-genai/commit/97fa11f7166b88b5d85dd6450ba345af7310b975))
+* Make Apache HTTP Components an optional Maven dependency in GenAI. ([a01e464](https://github.com/googleapis/java-genai/commit/a01e464573d0e7eea093734fcaad2d824173b41c))
+* Replace System .err & .out with correct (JUL) Logging in GenAI AsyncLive. ([1f756d6](https://github.com/googleapis/java-genai/commit/1f756d6a52b600f33793f898f30dd5cadaa52d23))
+
+## [1.37.0](https://github.com/googleapis/java-genai/compare/v1.36.0...v1.37.0) (2026-01-30)
+
+
+### Features
+
+* Support distillation tuning ([cec1b88](https://github.com/googleapis/java-genai/commit/cec1b88ab5a8d5cb801f4db19ce73f3f01732c70))
+* Support OSS Tuning in GenAI SDK ([868d8ed](https://github.com/googleapis/java-genai/commit/868d8edee6a449937ed8b74f909071d8496fc68f))
+
+
+### Bug Fixes
+
+* Add metadata in batch inlined response ([c80dd07](https://github.com/googleapis/java-genai/commit/c80dd070d9f23ec6463e7e704f003ffb4dba354c))
+
+## [1.36.0](https://github.com/googleapis/java-genai/compare/v1.35.0...v1.36.0) (2026-01-22)
+
+
+### Features
+
+* Add ModelArmorConfig support for prompt and response sanitization via the Model Armor service ([9c77a8f](https://github.com/googleapis/java-genai/commit/9c77a8f05959b205e025c70eb794740ac2e1724b))
+
+## [1.35.0](https://github.com/googleapis/java-genai/compare/v1.34.0...v1.35.0) (2026-01-14)
+
+
+### Features
+
+* voice activity support ([5ffcf2b](https://github.com/googleapis/java-genai/commit/5ffcf2b20c95bf7cd84070700383b105e849d7a3))
+
+## [1.34.0](https://github.com/googleapis/java-genai/compare/v1.33.0...v1.34.0) (2026-01-08)
+
+
+### Features
+
+* Add gemini-3-pro-preview support for local tokenizer ([40480f4](https://github.com/googleapis/java-genai/commit/40480f4e784f076be787c0c3213918e88ffc4296))
+
+
+### Documentation
+
+* Update Virtual Try-On model id in samples and docstrings ([e349635](https://github.com/googleapis/java-genai/commit/e349635621abe4b27a88a30e2f7c1ad960851767))
+
+## [1.33.0](https://github.com/googleapis/java-genai/compare/v1.32.0...v1.33.0) (2026-01-07)
+
+**Note:** The artifacts for this version were not published to Maven. Please upgrade directly to version **1.34.0**.
+
+## [1.32.0](https://github.com/googleapis/java-genai/compare/v1.31.0...v1.32.0) (2025-12-17)
+
+
+### Features
+
+* Add minimal and medium thinking levels. ([ecfadfe](https://github.com/googleapis/java-genai/commit/ecfadfe6717870d1ea403091863d16cec5fcff79))
+* Add PersonGeneration to ImageConfig for Vertex Gempix ([d8a4c43](https://github.com/googleapis/java-genai/commit/d8a4c432e5024bd171cf4b791925a40b285d7793))
+* Add ultra high resolution to the media resolution in Parts. ([7c9b7f6](https://github.com/googleapis/java-genai/commit/7c9b7f62275487183b7f5ded4db40af9a4422a67))
+* support multi speaker for Vertex AI ([c50e47b](https://github.com/googleapis/java-genai/commit/c50e47bfba44d7bd979b37b7c4b024424c08c29a))
+
+## [1.31.0](https://github.com/googleapis/java-genai/compare/v1.30.0...v1.31.0) (2025-12-11)
+
+
+### Features
+
+* Add enableEnhancedCivicAnswers feature in GenerateContentConfig ([0570478](https://github.com/googleapis/java-genai/commit/05704781fa7627efb5f4486b0a8763698bd6e6f2))
+* Add IMAGE_RECITATION and IMAGE_OTHER enum values to FinishReason ([26c3c35](https://github.com/googleapis/java-genai/commit/26c3c351fde231c943a3695af35961535923c8b1))
+* Add voice activity detection signal. ([d1ca685](https://github.com/googleapis/java-genai/commit/d1ca6854006248e1eab10212d1b1dade56e9b158))
+
+
+### Documentation
+
+* Add an example for ClientOptions usage ([0a8a26e](https://github.com/googleapis/java-genai/commit/0a8a26e0e75e791c2a99b012aba5c9bc09430cd7))
+
+## [1.30.0](https://github.com/googleapis/java-genai/compare/v1.29.0...v1.30.0) (2025-12-08)
+
+
+### Features
+
+* Add ProxyOptions in ClientOptions for configuring proxies ([eafdf79](https://github.com/googleapis/java-genai/commit/eafdf791f9beea50b6944a8f2118e9a1934f3a17))
+* Ephemeral token for Gemini Live API support in Java ([4ce094b](https://github.com/googleapis/java-genai/commit/4ce094b015e39574976086fb5c84a468481794b8))
+* Support ReplicatedVoiceConfig ([dbe314d](https://github.com/googleapis/java-genai/commit/dbe314de08a2a9b60f72ca4e67464a6f704c1ccb))
+
+## [1.29.0](https://github.com/googleapis/java-genai/compare/v1.28.0...v1.29.0) (2025-12-03)
+
+
+### Features
+
+* Add empty response for tunings.cancel() ([57218f5](https://github.com/googleapis/java-genai/commit/57218f56512ac6221a72f930d95a07dac2209cf6))
+
+
+### Bug Fixes
+
+* Match the versions of the Java Protobuf and Google API Common dependencies (com.google.protobuf:protobuf-java:3.25.5 & com.google.api:api-common:2.45.0) with Google Cloud Java SDKs. ([6c37f58](https://github.com/googleapis/java-genai/commit/6c37f5858f81f4bfd338c92c712d45222670e24b))
+
+
+### Documentation
+
+* Recommend using response_json_schema in error messages and docstrings. ([6b952e9](https://github.com/googleapis/java-genai/commit/6b952e949d46ae9d2123045a8dd741305c50a2ce))
+
+## [1.28.0](https://github.com/googleapis/java-genai/compare/v1.27.0...v1.28.0) (2025-11-17)
+
+
+### Features
+
+* add display name to FunctionResponseBlob ([8db8c57](https://github.com/googleapis/java-genai/commit/8db8c576247813991110e6ea10df999b756771ae))
+* add display name to FunctionResponseFileData ([f5ee8b7](https://github.com/googleapis/java-genai/commit/f5ee8b7744d7a729ac83c9072882c52345841625))
+* Add generate_content_config.thinking_level ([a47df92](https://github.com/googleapis/java-genai/commit/a47df920d935ba6e8e9751c0a50c0a2bb36c4189))
+* Add image output options to ImageConfig for Vertex ([3eac0b8](https://github.com/googleapis/java-genai/commit/3eac0b87c4e45e99123d7e321eb7d77007367b19))
+* Add part.media_resolution ([a47df92](https://github.com/googleapis/java-genai/commit/a47df920d935ba6e8e9751c0a50c0a2bb36c4189))
+* support Function call argument streaming for all languages ([f310452](https://github.com/googleapis/java-genai/commit/f3104521fc0cc934c2f5e29dedd8e22970a99897))
+* support upload to file search stores ([7862ce3](https://github.com/googleapis/java-genai/commit/7862ce38e950e3e6ff269e4f571474c42f4b19e4))
+
+## [1.27.0](https://github.com/googleapis/java-genai/compare/v1.26.0...v1.27.0) (2025-11-12)
+
+
+### Features
+
+* Add `images()` convenience method to `GenerateImagesResponse` ([155df8d](https://github.com/googleapis/java-genai/commit/155df8d25bdbcf1e6ef4b9859b5be28dfe9f943e))
+* Add EvaluationConfig support to tune() in Java ([795cf73](https://github.com/googleapis/java-genai/commit/795cf73d87894243d06c793262bed9b488167f95))
+* Automatically set response type in FunctionDeclaration during the AFC ([5ce99df](https://github.com/googleapis/java-genai/commit/5ce99df9136b1606fed3f385ff68b4ef84e931eb))
+* Support overriding the max read length in the JSON parser ([29d2fca](https://github.com/googleapis/java-genai/commit/29d2fcac0202bd8ba81c0973f172432b18bc3f79))
+
+
+### Bug Fixes
+
+* Add missing fields to the model types ([7b7b41f](https://github.com/googleapis/java-genai/commit/7b7b41f05b21a37c4ce9bd712a28d9432d07105f))
+* Fix base_steps parameter for recontext_image ([85aaa79](https://github.com/googleapis/java-genai/commit/85aaa79121489181879f1e0ad84d683c1d000f53))
+* Fix models.list() filter parameter ([123ada5](https://github.com/googleapis/java-genai/commit/123ada51cba5de22ec755716e551f398a6210a38))
+
+
+### Documentation
+
+* Add README for Files API ([6d206aa](https://github.com/googleapis/java-genai/commit/6d206aaf3f85cf1460418ddabcd1b6cd07693357))
+
+## [1.26.0](https://github.com/googleapis/java-genai/compare/v1.25.0...v1.26.0) (2025-11-05)
+
+
+### Features
+
+* Add clearXxx methods to data type builders ([a4900c9](https://github.com/googleapis/java-genai/commit/a4900c97ec7c256b45b729ae68404aea4fbf5830))
+* add complete stats to BatchJob ([659c65c](https://github.com/googleapis/java-genai/commit/659c65cc777c35ae5dc8ef84caf00f4aa30bb3db))
+* Add FileSearch tool and associated FileSearchStore management APIs ([8ada6ef](https://github.com/googleapis/java-genai/commit/8ada6efb0c2b2a2231acc08c952e2fc76e20a29d))
+* Add image_size to ImageConfig (Early Access Program) ([c1af981](https://github.com/googleapis/java-genai/commit/c1af981dc19fad22db68126ec6153d9fa20ec734))
+* Added phish filtering feature. ([ed4e12c](https://github.com/googleapis/java-genai/commit/ed4e12c44e9fec3c2131a52d995cb0602dc246a0))
+* Return response headers for generateContentStream ([82a8118](https://github.com/googleapis/java-genai/commit/82a8118968bb5da37ec81dce83580bd86767bf62))
+* Support lists as function parameters in AFC (fixes [#527](https://github.com/googleapis/java-genai/issues/527)) ([452d2e5](https://github.com/googleapis/java-genai/commit/452d2e50a6c13ab94d5de380d32dcb164379a2cb))
+
+
+### Bug Fixes
+
+* disable AFC when there are AFC incompatible tool presented. ([6099d87](https://github.com/googleapis/java-genai/commit/6099d871ff703606dffbf533a231869725f32bc6))
+
+
+### Documentation
+
+* add blank line before version update marker ([82616c2](https://github.com/googleapis/java-genai/commit/82616c23be6cf7295049026985d105c5c2993c7a))
+* Update Java SDK README spacing ([82616c2](https://github.com/googleapis/java-genai/commit/82616c23be6cf7295049026985d105c5c2993c7a))
+
+## [1.25.0](https://github.com/googleapis/java-genai/compare/v1.24.0...v1.25.0) (2025-10-29)
+
+
+### Features
+
+* Add safety_filter_level and person_generation for Imagen upscaling ([09a8075](https://github.com/googleapis/java-genai/commit/09a80754b202fdf903039341f5266f62d9b879cb))
+* Add support for preference optimization tuning in the SDK. ([5d4123c](https://github.com/googleapis/java-genai/commit/5d4123c0391d443e94bb1e81524ccae8779462d7))
+* Added Operations.get which is a generic method which will handle all Operation types. ([c1dc32f](https://github.com/googleapis/java-genai/commit/c1dc32f84d0e4d14a16345dcb404c8b2bef05338))
+* Pass file name to the backend when uploading with a file path ([081a9a6](https://github.com/googleapis/java-genai/commit/081a9a6a67d1ba542edb1d1330dfa56579204a43))
+* support default global location when not using api key with vertexai backend ([f9028a7](https://github.com/googleapis/java-genai/commit/f9028a71d4e736a8dc97daa54e6e4275b5016abd))
+* Support retries in API requests ([3d5de00](https://github.com/googleapis/java-genai/commit/3d5de000277eb0da172d6b19795c6f2d4b88c213))
+
+
+### Documentation
+
+* Add docstring for classes and fields that are not supported in Gemini or Vertex API ([7a03dac](https://github.com/googleapis/java-genai/commit/7a03dac0a4e3388f98be199765794fcf511bfe83))
+* Add docstring for enum classes that are not supported in Gemini or Vertex API ([830a12f](https://github.com/googleapis/java-genai/commit/830a12f3dcbb8beb1dd5ff3ff82f6b19ebb2af93))
+* Add documentation for the retry behavior ([4fbcf51](https://github.com/googleapis/java-genai/commit/4fbcf514321fdc2cbee1393fc6babe33fd0e5e74))
+
+## [1.24.0](https://github.com/googleapis/java-genai/compare/v1.23.0...v1.24.0) (2025-10-22)
+
+
+### Features
+
+* Add enable_enhanced_civic_answers in GenerationConfig ([684a2c5](https://github.com/googleapis/java-genai/commit/684a2c5b582fa4ca13cb9cfe819ef759778101b0))
+* support createEmbeddings in Batches.java ([8947f6f](https://github.com/googleapis/java-genai/commit/8947f6fc20fbdd90a7d17071dee1bd2e5bea0c3e))
+* support jailbreak in HarmCategory and BlockedReason ([3dab40b](https://github.com/googleapis/java-genai/commit/3dab40bc367168ed48d8d1acfb278f5bc6edb83f))
+
+
+### Bug Fixes
+
+* Make async methods in Batches module truly non-blocking ([f2ae75a](https://github.com/googleapis/java-genai/commit/f2ae75ac364702f483c376e458a120d1ffa93b17))
+* Make async methods in Caches, Tuning, and Operations modules truly non-blocking ([db56239](https://github.com/googleapis/java-genai/commit/db56239bbebbfe3cb95e00d2d3eac253b76f22fe))
+* Make async methods in Models module truly non-blocking ([c205d01](https://github.com/googleapis/java-genai/commit/c205d0172ca40e01f7d8de17a3bc9d38eeb5fc21))
+
+## [1.23.0](https://github.com/googleapis/java-genai/compare/v1.22.0...v1.23.0) (2025-10-15)
+
+
+### Features
+
+* Support video extension for Veo on Gemini Developer API ([b398509](https://github.com/googleapis/java-genai/commit/b398509697a3e9aa27bad5e804382c5a4db333ab))
+
+## [1.22.0](https://github.com/googleapis/java-genai/compare/v1.21.0...v1.22.0) (2025-10-10)
+
+
+### Features
+
+* Enable Google Maps tool for Genai. ([a4baf3c](https://github.com/googleapis/java-genai/commit/a4baf3c610ddcb1ed36c1501fcb2248b5a6bd610))
+* Support enableWidget feature in GoogleMaps ([aefbd5c](https://github.com/googleapis/java-genai/commit/aefbd5c1519f453cd2fe158a2765c195a9454322))
+* Support Gemini batch inline request's metadata and add test coverage to safety setting ([17033b3](https://github.com/googleapis/java-genai/commit/17033b38a93d6952b29699f5a4c79ed9dd862976))
+
+## [1.21.0](https://github.com/googleapis/java-genai/compare/v1.20.0...v1.21.0) (2025-10-08)
+
+
+### Features
+
+* Add `NO_IMAGE` enum value to `FinishReason` ([6b00c0b](https://github.com/googleapis/java-genai/commit/6b00c0b7dc8c85fcefc5aac643c3588048317614))
+* Add labels field to Imagen configs ([e69cf68](https://github.com/googleapis/java-genai/commit/e69cf68583ca581f1a7fad89b04292036433cdb4))
+* Add thinking_config for live ([274c21d](https://github.com/googleapis/java-genai/commit/274c21d34310e630b9b4ad296b4c8314a4249d0c))
+* Add utility methods for creating `FunctionResponsePart` and creating FunctionResponse `Part` with `FunctionResponseParts` ([af16a4c](https://github.com/googleapis/java-genai/commit/af16a4c994e0cc4e6fbc2cdbda825246df9aa253))
+* Enable Ingredients to Video and Advanced Controls for Veo on Gemini Developer API (Early Access Program) ([4c42e65](https://github.com/googleapis/java-genai/commit/4c42e6527a7fe43c0b534e381d65b5d9650e8709))
+
+
+### Bug Fixes
+
+* Ensure Live server message are properly converted ([206dc88](https://github.com/googleapis/java-genai/commit/206dc88e3b220a875f784a507fc9470bc411de36))
+
+## [1.20.0](https://github.com/googleapis/java-genai/compare/v1.19.0...v1.20.0) (2025-10-01)
+
+
+### Features
+
+* Add `ImageConfig` to `GenerateContentConfig` ([6fb5eba](https://github.com/googleapis/java-genai/commit/6fb5eba0e916ada8f300dd5ad333f269e9044ea3))
+
+## [1.19.0](https://github.com/googleapis/java-genai/compare/v1.18.0...v1.19.0) (2025-09-30)
+
+
+### Features
+
+* expose session id in Live API ([b6d5389](https://github.com/googleapis/java-genai/commit/b6d5389899bd1443d5c508776dfe5909eb1d7400))
+* rename ComputerUse tool (early access) ([4bbba2b](https://github.com/googleapis/java-genai/commit/4bbba2b53eedec0b28a5d98d7fc193683c565f50))
+
+## [1.18.0](https://github.com/googleapis/java-genai/compare/v1.17.0...v1.18.0) (2025-09-25)
+
+
+### Features
+
+* Add FunctionResponsePart & ToolComputerUse.excludedPredefinedFunctions ([1a24bed](https://github.com/googleapis/java-genai/commit/1a24bedc752851236b0a7239a7dba7090e4ac4e8))
+* Support Imagen 4 Ingredients on Vertex ([b5eed8d](https://github.com/googleapis/java-genai/commit/b5eed8d1323a3d37b53c1d8c5c5557392ce7ed44))
+
+
+### Bug Fixes
+
+* Expose `JOB_STATE_RUNNING` and `JOB_STATE_EXPIRED` for Gemini Batches states ([c5b4fdf](https://github.com/googleapis/java-genai/commit/c5b4fdf58b9d0d74efdd2c7e740bed8b6b661c99))
+* initialization of `pre_tuned_model_checkpoint_id` from tuning config. ([c293633](https://github.com/googleapis/java-genai/commit/c293633a8fe298668f030ba3b257347a8fd0eedf))
+* Make async generateContent and generateContentStream truly non-blocking ([5cb18fd](https://github.com/googleapis/java-genai/commit/5cb18fd4f07f9b1f21efb82fe961e473325f6257))
+* only run unit tests in github action ([9b2861b](https://github.com/googleapis/java-genai/commit/9b2861bb79d50c10c152aa010bedf0bc48a04ad8))
+
+## [1.17.0](https://github.com/googleapis/java-genai/compare/v1.16.0...v1.17.0) (2025-09-16)
+
+
+### Features
+
+* Add 'turn_complete_reason' and 'waiting_for_input' fields. ([5bc4873](https://github.com/googleapis/java-genai/commit/5bc48732fd9281162942b158de34173343d7b179))
+* Add `VideoGenerationMaskMode` enum for Veo 2 Editing ([e5c8277](https://github.com/googleapis/java-genai/commit/e5c82778586dfee4ed7d04a9eabb2a4d8eac6185))
+* Add labels to create tuning job config ([695e17a](https://github.com/googleapis/java-genai/commit/695e17a7b1adebbccb1651d30b768d27f81c3977))
+* generate the function_call class's converters ([38703c7](https://github.com/googleapis/java-genai/commit/38703c726606cbe1b6f5f5f4eb809310b0df94a8))
+* java local tokenizer ([d774185](https://github.com/googleapis/java-genai/commit/d7741856cafd3b8e05803f7b452335fbc4ce8977))
+* Support Veo 2 Editing on Vertex ([d401d3c](https://github.com/googleapis/java-genai/commit/d401d3cf6a5f9ef3d2a76a548eed9d218169170e))
+
+
+### Bug Fixes
+
+* Enable `id` field in `FunctionCall` for Vertex AI. ([3773fe7](https://github.com/googleapis/java-genai/commit/3773fe75007b9ce83692de0031853f0f607bff3e))
+* update Live API audio example with better interruption handling ([cad8df9](https://github.com/googleapis/java-genai/commit/cad8df9c4edaf0806a641869fef6379ed05f0189))
+
+## [1.16.0](https://github.com/googleapis/java-genai/compare/v1.15.0...v1.16.0) (2025-09-02)
+
+
+### Features
+
+* Add resolution field for Gemini Developer API Veo 3 generation ([eec410c](https://github.com/googleapis/java-genai/commit/eec410c5b68de471e9a824e61f0efb819841dfe6))
+* add the response body for generateContent ([a011580](https://github.com/googleapis/java-genai/commit/a0115804e438bac120d5155c91ece53c79ada677))
+
+
+### Documentation
+
+* Refactor/update docstrings for Imagen and Veo ([2470101](https://github.com/googleapis/java-genai/commit/24701018feb91d147bf1817b04752e2595bf40ab))
+
+## [1.15.0](https://github.com/googleapis/java-genai/compare/v1.14.0...v1.15.0) (2025-08-27)
+
+
+### Features
+
+* add `sdkHttpResponse.headers` to *Delete responses. ([4be038d](https://github.com/googleapis/java-genai/commit/4be038de86c782d103d21258db51055f35e5af21))
+* Add output_gcs_uri to Imagen upscale_image ([7649467](https://github.com/googleapis/java-genai/commit/76494678d3937229778c5063b4f4ff340f977bba))
+* add the response body for generateContent ([6e28ab4](https://github.com/googleapis/java-genai/commit/6e28ab4236565be61fb11e79ca9f2f31a2013598))
+* add the response body for generateContent ([b2a5b3f](https://github.com/googleapis/java-genai/commit/b2a5b3f5a6ef7a8bb4d011980d90ffdc3c745603))
+* Add VALIDATED mode into FunctionCallingConfigMode ([4bb8680](https://github.com/googleapis/java-genai/commit/4bb868046199d3249f75ede213ef7d77e0b7783f))
+* Add VideoGenerationReferenceType enum for generate_videos ([df9d910](https://github.com/googleapis/java-genai/commit/df9d910537ec7de6188f777801b4d50e84cd91e7))
+* Support GenerateVideosSource for Veo GenerateVideos ([c26af63](https://github.com/googleapis/java-genai/commit/c26af6396002cf21c0ed272290d44b09b6a41840))
+* support tunings.cancel in the genai SDK for Python, Java, JS, and Go ([9982251](https://github.com/googleapis/java-genai/commit/9982251d2dd80d3151aefb4462d9e4864d8e064e))
+
+
+### Documentation
+
+* Refactor model IDs into a Constants class ([dacd787](https://github.com/googleapis/java-genai/commit/dacd7875d41f810e50f2655e5d0e62f031197e61))
+
+## [1.14.0](https://github.com/googleapis/java-genai/compare/v1.13.0...v1.14.0) (2025-08-22)
+
+
+### Features
+
+* Add add_watermark field for recontext_image (Virtual Try-On, Product Recontext) ([5aacbc0](https://github.com/googleapis/java-genai/commit/5aacbc06435fb36fffde0c3641b3077493f13577))
+
+
+### Bug Fixes
+
+* Fix the bug that files.create doesn't return the upload URL correctly ([eb40c5f](https://github.com/googleapis/java-genai/commit/eb40c5f7f255b46a7a820da044e210127c7aac18))
+
+
+### Documentation
+
+* update TokensInfo docstring ([48eba7f](https://github.com/googleapis/java-genai/commit/48eba7fcb369537ca4266ec61107e016f7c242ed))
+
+## [1.13.0](https://github.com/googleapis/java-genai/compare/v1.12.0...v1.13.0) (2025-08-18)
+
+
+### Features
+
+* expose JsonSerializable.stringToJsonNode to help user better use *JsonSchema fields. ([35d783b](https://github.com/googleapis/java-genai/commit/35d783b5d1655b6f0d52afefa633c608f39d4e01))
+* Return response headers for all methods (except streaming methods) ([7e8b71b](https://github.com/googleapis/java-genai/commit/7e8b71b0769362a728e2bf9b93738563113a4edc))
+* Support Imagen image segmentation on Vertex ([e2a561b](https://github.com/googleapis/java-genai/commit/e2a561b11b53f3a7cc30aacb4a0dcf6a26e01645))
+* Support Veo 2 Reference Images to Video Generation on Vertex ([2f5580f](https://github.com/googleapis/java-genai/commit/2f5580fd1e78d6e8e4f371f291dacf98c7c617ef))
+
+## [1.12.0](https://github.com/googleapis/java-genai/compare/v1.11.0...v1.12.0) (2025-08-13)
+
+
+### Features
+
+* enable continuous fine-tuning on a pre-tuned model in the SDK. ([e49d350](https://github.com/googleapis/java-genai/commit/e49d3509355f717d391a88b6ff1a6f4f6d83fddc))
+* support document name in grounding metadata ([8273922](https://github.com/googleapis/java-genai/commit/8273922ebfbce4ffafa8993bcc6928b47b5ff821))
+* Support exclude_domains in Google Search and Enterprise Web Search ([e975d28](https://github.com/googleapis/java-genai/commit/e975d284f78e0c9a3cd2199d304b4739bad36fe1))
+
+## [1.11.0](https://github.com/googleapis/java-genai/compare/v1.10.0...v1.11.0) (2025-08-06)
+
+
+### Features
+
+* Add image_size field for Gemini Developer API Imagen 4 generation ([c50c755](https://github.com/googleapis/java-genai/commit/c50c755c08efbed5a62e1006890b1d0bd9956702))
+* enable responseId for Gemini Developer API ([4912ff4](https://github.com/googleapis/java-genai/commit/4912ff421d6d3bc40edd70a939f71f5f33f58597))
+* support extraBody in HttpOptions class ([036bac8](https://github.com/googleapis/java-genai/commit/036bac89fda15022ec4d9c5c73ba81ad0a6cc9be))
+* Support image recontext on Vertex ([e7de8c8](https://github.com/googleapis/java-genai/commit/e7de8c83bbd2e7e37c2198c3501e2d5bee58c0a2))
+* Support new enum types for UrlRetrievalStatus ([cb27222](https://github.com/googleapis/java-genai/commit/cb27222a7f7cdf442a7d6b61496709f7cf084a91))
+* support response headers in Go for all methods. ([222b41e](https://github.com/googleapis/java-genai/commit/222b41e196afc13775cc22292a58567d7b4859fa))
+
+
+### Bug Fixes
+
+* Remove duplicate JavaTimeModule in JsonSerializable ([a7dbd4c](https://github.com/googleapis/java-genai/commit/a7dbd4c527456f20aa5d154bde14f74f6e66d174))
+
+
+### Documentation
+
+* Add Imagen and Veo to README ([cc0a0aa](https://github.com/googleapis/java-genai/commit/cc0a0aa28cae618acf617ab92819df78d80afea5))
+* Add latest models features in README ([a2eccaf](https://github.com/googleapis/java-genai/commit/a2eccafae5c6c9b82341a148b572bf9bc80f241b))
+* mark Client as thread safe and Chat as not thread safe ([be3e50e](https://github.com/googleapis/java-genai/commit/be3e50e4217780329c0636fd7f8a1b743e7f9597))
+
+## [1.10.0](https://github.com/googleapis/java-genai/compare/v1.9.0...v1.10.0) (2025-07-23)
+
+
+### Features
+
+* Add image_size field for Vertex Imagen 4 generation ([950c0c6](https://github.com/googleapis/java-genai/commit/950c0c657f786039e3a301bf1237a57ae324ff62))
+* Support API keys for VertexAI mode for Java SDK ([826c0dc](https://github.com/googleapis/java-genai/commit/826c0dca02e06fcb6c7980259b23e955db176ec6))
+* Support http headers in GenerateContentResponse ([5282774](https://github.com/googleapis/java-genai/commit/528277406279d772c01ac2a48544962408ac235b))
+
+
+### Bug Fixes
+
+* Defer loading ADC when credentials is provided explicitly in Live API ([a540614](https://github.com/googleapis/java-genai/commit/a5406140aeaf9774265a5bcce79dae0707ed9287))
+* **live:** Enhance security by moving api key from query parameters to header ([e48c7f1](https://github.com/googleapis/java-genai/commit/e48c7f1e73dadf5c5198f9b58cea322deb7a4ed0))
+* Pager throws an exception if list request returns nothing(correct behavior is returning a Pager without any items in it) ([0a2301b](https://github.com/googleapis/java-genai/commit/0a2301b19fcd3e4d3694d42780da8f5ffe5f9207))
+
+
+### Documentation
+
+* Update README with latest features in Client ([dcf70cc](https://github.com/googleapis/java-genai/commit/dcf70cc64a93355cdc6a2eedf172399dd332750e))
+
+## [1.9.0](https://github.com/googleapis/java-genai/compare/v1.8.0...v1.9.0) (2025-07-16)
+
+
+### Features
+
+* Add `addWatermark` parameter to the edit image configuration. ([c4598da](https://github.com/googleapis/java-genai/commit/c4598da0903d5dacb0c7bb4462aec1226ba259bf))
+* add Tuning support for Java ([0ab209d](https://github.com/googleapis/java-genai/commit/0ab209db99bf98b58f7273fb12843984c42cb910))
+
+
+### Documentation
+
+* Update generated video resolution config docstring ([9a2ced8](https://github.com/googleapis/java-genai/commit/9a2ced8ed3a1896b8170cc9ca117b61cb9eea705))
+
+## [1.8.0](https://github.com/googleapis/java-genai/compare/v1.7.0...v1.8.0) (2025-07-09)
+
+
+### Features
+
+* Add new languages for Imagen 4 prompt language ([7e1e6d2](https://github.com/googleapis/java-genai/commit/7e1e6d2ead45c7a0737e4a010fce266fb436d2dd))
+
+## [1.7.0](https://github.com/googleapis/java-genai/compare/v1.6.0...v1.7.0) (2025-07-01)
+
+
+### Features
+
+* Support Batches delete ([782465d](https://github.com/googleapis/java-genai/commit/782465d9c85c3637586fef490983771c4b4b5df0))
+* Support different media input in Vertex Live API ([7f4c6bf](https://github.com/googleapis/java-genai/commit/7f4c6bf58804764d568bd3412086ead75a388df0))
+
+
+### Bug Fixes
+
+* Remove default timeout ([d1f6201](https://github.com/googleapis/java-genai/commit/d1f6201892de9f37b913044dd494c68b81bcc13a))
+
+## [1.6.0](https://github.com/googleapis/java-genai/compare/v1.5.0...v1.6.0) (2025-06-25)
+
+
+### Features
+
+* Add compressionQuality enum for generate_videos ([b0e665b](https://github.com/googleapis/java-genai/commit/b0e665bf6ae09dc2146e49714a4855443a270776))
+* Add enhance_input_image and image_preservation_factor fields for upscale_image ([94a329a](https://github.com/googleapis/java-genai/commit/94a329abcd3c668065abfae511b55766ed051668))
+* allow users to access headers for generateContent method and generateContentStream ([0315357](https://github.com/googleapis/java-genai/commit/03153578ea64f0c34836ac62395aa867f44eac07))
+* Batches support in Java ([5ce13e9](https://github.com/googleapis/java-genai/commit/5ce13e9c79c4791d405b1dfa71c1d9358dc5a08d))
+* configure release-please to automatically update package version across all files during releases. ([9131ac2](https://github.com/googleapis/java-genai/commit/9131ac24fde477afb25deb516c7ace51530ed8d9))
+* expose the responseJsonSchema in GenerateContentConfig ([9d9acdb](https://github.com/googleapis/java-genai/commit/9d9acdb494358155cbb3c2ce3acbe55209bbdb7e))
+* support client.caches.update method ([345c2b9](https://github.com/googleapis/java-genai/commit/345c2b93789913d6d84cdde9c30f86ec4041bd24))
+
+
+### Documentation
+
+* add more comments to make it easier to follow live api code, and to explain the usage of new concepts like thenCompose. ([96c792c](https://github.com/googleapis/java-genai/commit/96c792c3aa84f632e0b46bb986de403ecbf4edc1))
+* improve generate images documentation ([44c21dd](https://github.com/googleapis/java-genai/commit/44c21dd78e0d0be0e681e991b15d3dae3be360f2))
+* Update description of thinking_budget. ([265f20a](https://github.com/googleapis/java-genai/commit/265f20addd9e9e76c249e6042d653c8cec9f27a4))
+
## [1.5.0](https://github.com/googleapis/java-genai/compare/v1.4.1...v1.5.0) (2025-06-19)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3ad5fb65e0f..c5a26ec83a4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,20 @@
# Contributing
-The Google Gen AI SDK will accept contributions in the future.
\ No newline at end of file
+The Google Gen AI SDK will accept contributions in the future.
+
+## Running tests
+
+```sh
+mvn clean test -Dtest='**/*Test' -Djacoco.skip=true
+```
+
+## JDK version
+
+The tests may fail if you use the wrong JDK version. See the currently supported JDK versions in [.github/workflows/unit-tests.yml](.github/workflows/unit-tests.yml).
+
+To run the tests with a specific JDK version, which you must have installed, set the `JAVA_HOME` environment variable. For example:
+
+```sh
+# Run with JDK 21
+JAVA_HOME=$(/usr/libexec/java_home -v 21) mvn clean test -Dtest='**/*Test' -Djacoco.skip=true
+```
diff --git a/GEMINI.md b/GEMINI.md
new file mode 100644
index 00000000000..c2c2e40f9f2
--- /dev/null
+++ b/GEMINI.md
@@ -0,0 +1,83 @@
+---
+trigger: always_on
+description: Read this file when working on the GenAI Java SDK
+---
+
+> [!IMPORTANT] When working in this directory, you must also read the
+> generator's context file at `google3/google/cloud/aiplatform/sdk/GEMINI.md`.
+
+# Java SDK for Google GenAI
+
+This directory contains the Java SDK for the Google GenAI project. It is designed to provide a Java interface for interacting with Gemini models, supporting both the Gemini Developer API (MLDev) and Gemini Enterprise Agent Platform API (Vertex AI).
+
+## Project Overview
+
+The Java SDK is largely auto-generated based on API discovery documents. However, it also includes hand-written core infrastructure to support authentication, HTTP communication, and custom serialization needs.
+
+### Relationship to the Central Generator
+
+As described in `google3/google/cloud/aiplatform/sdk/GEMINI.md`, this SDK is produced by the central Python-based multi-language SDK generator located at `//google/cloud/aiplatform/sdk/generator`.
+- Language-specific logic for Java is contained in `java_generator.py` and `java_datatypes.py` (in the generator directory).
+- The generator produces data classes and API methods based on discovery docs and configuration.
+- **Important**: The generator updates files in `src/private` from files in `src/main`. Do NOT edit files in `src/private` directly.
+
+## Architecture and Key Components
+
+### Hand-written Core Files
+
+The following files in `src/main/java/com/google/genai/` are hand-written and form the core infrastructure of the SDK:
+
+- **`Client.java`**: The main entry point for the SDK. It provides access to all services (Models, Chats, Files, etc.) in both synchronous and asynchronous modes. Handles initialization with API keys or credentials.
+- **`ApiClient.java`**: An abstract base class for issuing HTTP requests. It handles request building, URL construction, header execution (including auth), and OkHttpClient configuration.
+- **`HttpApiClient.java`**: The default concrete implementation of `ApiClient` that uses OkHttp to make real network calls.
+- **`ApiResponse.java`**: A simple wrapper around the HTTP response, providing access to status code, headers, and body content.
+- **`Common.java`**: Contains utility methods for setting and getting values by path in JSON objects, URL encoding, and other common tasks.
+- **`JsonSerializable.java`**: Provides base functionality for JSON serialization and deserialization using Jackson.
+- **`Transformers.java`**: Contains static methods to transform parameters and types to match API expectations (e.g., converting model names to resource paths).
+- **`ResponseStream.java`**: Manages streaming responses, ensuring SSE compliance and detecting application-level errors.
+- **`ReplayApiClient.java`**: A testing-specific implementation of `ApiClient` that reads responses from replay files instead of making network calls.
+- **`ReplayApiResponse.java`**: Used with `ReplayApiClient` to represent mocked responses.
+- **`Chat.java`, `AsyncChat.java`, `ChatBase.java`**: Manage multi-turn chat sessions and history.
+- **`UploadClient.java`**: Handles resumable file uploads to the File API.
+- **`GoogleCredentialsHttpClient.java`**: Handles authentication using Google Credentials (typically for Vertex AI).
+- **`AfcUtil.java`**: Utilities for Auto-Function Calling.
+
+### Custom Serialization (`CustomDurationDeserializer`)
+
+To handle API response fields that return duration as a string with an 's' suffix (e.g., "3.5s"), the SDK uses a custom deserializer in `JsonSerializable.java`: `CustomDurationDeserializer`.
+- This deserializer ensures that fields mapped to `java.time.Duration` in Java can be successfully decoded from JSON.
+- It parses the numeric part and creates a `Duration` instance.
+- `JsonSerializable` also includes a `CustomDurationSerializer` to output the same format when serializing.
+
+## Development and Testing
+
+### Running Tests
+
+The project uses Maven for building and testing.
+
+To run all unit tests, you must set the environment variable for replay tests:
+
+```bash
+export GOOGLE_GENAI_REPLAYS_DIRECTORY="`blaze info workspace`/google/cloud/aiplatform/sdk/genai/replays"
+mvn clean test
+```
+
+### Running Examples
+
+Examples are located in `examples/`. To run all examples (recommended before release):
+```bash
+./run_examples.sh
+```
+
+## Modifying the SDK
+
+- **For changes to generated code**: Do NOT edit the generated files directly in `src/private`. Modify the generator configuration or templates in `//google/cloud/aiplatform/sdk/` or modify files in `src/main` and then run the generator:
+ ```bash
+ google/cloud/aiplatform/sdk/generator/run_generator.sh --langs java
+ ```
+- **For changes to core infrastructure**: Modify the hand-written files in `src/main`.
+- **Knowledge Sharing**: Every time you make a code change in this Java GenAI SDK, try to add useful knowledge into this `GEMINI.md` file if it is necessary or helpful for future maintainers (e.g., documenting new hand-written files, custom serialization patterns, or specific workflow gotchas).
+
+## Legacy Names
+
+- Similar to other SDKs, you might still find legacy names like "mldev" (representing Gemini API) or "vertex" (representing Gemini Enterprise Agent Platform API) in generator code and test files.
diff --git a/README.md b/README.md
index cf62d054f5f..47233672678 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# Google Gen AI Java SDK
Java idiomatic SDK for the
-[Gemini Developer APIs][gemini-api-doc] and [Vertex AI][vertex-api-doc] APIs.
+[Gemini Developer APIs][gemini-api-doc] and [Gemini Enterprise Agent Platform][gemini-enterprise-agent-platform-doc] APIs.
[![Maven][maven-version-image]][maven-version-link]
[![Javadoc][javadoc-image]][javadoc-link]
@@ -10,19 +10,17 @@ Java idiomatic SDK for the
If you're using Maven, add the following to your dependencies:
-
-
+[//]: # ({x-version-update-start:google-genai:released})
```xml
<dependency>
<groupId>com.google.genai</groupId>
<artifactId>google-genai</artifactId>
- <version>1.5.0</version>
+ <version>1.47.0</version>
</dependency>
```
-
-
+[//]: # ({x-version-update-end})
## Getting Started
@@ -45,6 +43,9 @@ Client client = Client.builder().apiKey("your-api-key").build();
```
#### Instantiate a client that uses Vertex AI API
+
+##### Using project and location
+
```java
import com.google.genai.Client;
@@ -57,39 +58,188 @@ Client client = Client.builder()
.build();
```
+##### Using API key on Vertex AI (GCP Express Mode)
+
+```java
+import com.google.genai.Client;
+
+// Explicitly set the `apiKey` and `vertexAI(true)` to use Vertex AI backend
+// in express mode.
+Client client = Client.builder()
+ .apiKey("your-api-key")
+ .vertexAI(true)
+ .build();
+```
+
#### (Optional) Using environment variables:
You can create a client by configuring the necessary environment variables.
Configuration setup instructions depends on whether you're using the Gemini
Developer API or the Gemini API in Vertex AI.
-**Gemini Developer API:** Set `GOOGLE_API_KEY` as shown below:
+**Gemini Developer API:** Set the `GOOGLE_API_KEY`. It will automatically be
+picked up by the client. Note that `GEMINI_API_KEY` is a legacy environment
+variable, it's recommended to use `GOOGLE_API_KEY` only. But if both are set,
+`GOOGLE_API_KEY` takes precedence.
```bash
export GOOGLE_API_KEY='your-api-key'
```
**Gemini API on Vertex AI:** Set `GOOGLE_GENAI_USE_VERTEXAI`,
-`GOOGLE_CLOUD_PROJECT` and `GOOGLE_CLOUD_LOCATION`, as shown below:
+`GOOGLE_CLOUD_PROJECT` and `GOOGLE_CLOUD_LOCATION`, or `GOOGLE_API_KEY` for
+Vertex AI express mode. It's recommended that you set only project & location,
+or API key. But if both are set, project & location takes precedence.
```bash
export GOOGLE_GENAI_USE_VERTEXAI=true
+
+# Set project and location for Vertex AI authentication
export GOOGLE_CLOUD_PROJECT='your-project-id'
export GOOGLE_CLOUD_LOCATION='us-central1'
+# or API key for express mode
+export GOOGLE_API_KEY='your-api-key'
```
+After configuring the environment variables, you can instantiate a client
+without passing any variables.
+
```java
import com.google.genai.Client;
Client client = new Client();
```
+### API Selection
+
+By default, the SDK uses the beta API endpoints provided by Google to support
+preview features in the APIs. The stable API endpoints can be selected by
+setting the API version to `v1`.
+
+To set the API version use `HttpOptions`. For example, to set the API version to
+`v1` for Vertex AI:
+
+```java
+import com.google.genai.Client;
+import com.google.genai.types.HttpOptions;
+
+Client client = Client.builder()
+ .project("your-project")
+ .location("your-location")
+ .vertexAI(true)
+ .httpOptions(HttpOptions.builder().apiVersion("v1"))
+ .build();
+```
+
+To set the API version to `v1alpha` for the Gemini Developer API:
+
+```java
+import com.google.genai.Client;
+import com.google.genai.types.HttpOptions;
+
+Client client = Client.builder()
+ .apiKey("your-api-key")
+ .httpOptions(HttpOptions.builder().apiVersion("v1alpha"))
+ .build();
+```
+
+### HttpOptions
+
+Besides `apiVersion`, [HttpOptions](https://github.com/googleapis/java-genai/blob/main/src/main/java/com/google/genai/types/HttpOptions.java)
+also allows for flexible customization of HTTP request parameters such as
+`baseUrl`, `headers`, and `timeout`:
+
+```java
+HttpOptions httpOptions = HttpOptions.builder()
+ .baseUrl("your-own-endpoint.com")
+ .headers(ImmutableMap.of("key", "value"))
+ .timeout(600)
+ .build();
+```
+
+Beyond client-level configuration, `HttpOptions` can also be set on a
+per-request basis, providing maximum flexibility for diverse API call settings.
+See [this example](https://github.com/googleapis/java-genai/blob/main/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java)
+for more details.
+
+### HttpRetryOptions
+
+[HttpRetryOptions](https://github.com/googleapis/java-genai/blob/main/src/main/java/com/google/genai/types/HttpRetryOptions.java)
+allows you to configure the automatic retry behavior for failed API calls. You
+can customize key settings like:
+
+ * Total number of attempts.
+ * Which HTTP status codes should trigger a retry (e.g., 429 for rate limits).
+ * Backoff strategy, including the initial delay and maximum delay between retries.
+
+```java
+HttpOptions httpOptions = HttpOptions.builder()
+ .retryOptions(
+ HttpRetryOptions.builder()
+ .attempts(3)
+ .httpStatusCodes(408, 429))
+ .build();
+```
+
+Since HttpRetryOptions is part of HttpOptions, it supports being set at the
+client level (as shown) or on a per-request basis. Note that providing
+`HttpRetryOptions` for a specific request will completely override any default
+retry settings configured on the client.
+
+### ClientOptions
+[ClientOptions](https://github.com/googleapis/java-genai/blob/main/src/main/java/com/google/genai/types/ClientOptions.java)
+enables you to customize the behavior of the HTTP client, including connection
+pool settings and proxy configurations.
+
+#### Connection Pool
+You can configure the connection pool via `maxConnections` (total maximum
+connections) and `maxConnectionsPerHost` (maximum connections to a single host).
+
+```java
+import com.google.genai.Client;
+import com.google.genai.types.ClientOptions;
+
+Client client =
+ Client.builder()
+ .apiKey("your-api-key")
+ .clientOptions(
+ ClientOptions.builder().maxConnections(64).maxConnectionsPerHost(16).build())
+ .build();
+```
+
+#### Proxy
+If your environment requires connecting through a proxy, you can configure it
+using `ProxyOptions`. The SDK supports `HTTP`, `SOCKS`, and `DIRECT` (no proxy)
+connection types, along with basic proxy authentication.
+
+```java
+import com.google.genai.Client;
+import com.google.genai.types.ClientOptions;
+import com.google.genai.types.ProxyOptions;
+import com.google.genai.types.ProxyType;
+
+ClientOptions clientOptions =
+ ClientOptions.builder()
+ .proxyOptions(
+ ProxyOptions.builder()
+ .type(ProxyType.Known.HTTP)
+ .host("your-proxy-host")
+ .port(8080)
+ .username("your-proxy-username")
+ .password("your-proxy-password"))
+ .build();
+Client client = Client.builder().apiKey("your-api-key").clientOptions(clientOptions).build();
+```
+
+If `ProxyOptions` is provided with `type` set to `DIRECT`, it will enforce a
+direct connection, bypassing any system-level proxy settings.
+
### Interact with models
-The Gen AI Java SDK allows you to access the service programmatically.
+The Google Gen AI Java SDK allows you to access the service programmatically.
The following code snippets are some basic usages of model inferencing.
#### Generate Content
-Use `generateContent` method for the most basic text generation.
+Use `generateContent` method for the most basic content generation.
##### with text input
@@ -106,10 +256,17 @@ public class GenerateContentWithTextInput {
Client client = new Client();
GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", "What is your name?", null);
+ client.models.generateContent("gemini-2.5-flash", "What is your name?", null);
// Gets the text string from the response by the quick accessor method `text()`.
System.out.println("Unary response: " + response.text());
+
+ // Gets the http headers from the response.
+ response
+ .sdkHttpResponse()
+ .ifPresent(
+ httpResponse ->
+ System.out.println("Response headers: " + httpResponse.headers().orElse(null)));
}
}
```
@@ -139,7 +296,68 @@ public class GenerateContentWithImageInput {
Part.fromUri("gs://path/to/image.jpg", "image/jpeg"));
GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", content, null);
+ client.models.generateContent("gemini-2.5-flash", content, null);
+
+ System.out.println("Response: " + response.text());
+ }
+}
+```
+
+##### Generate Content with extra configs
+To set configurations like System Instructions and Safety Settings, you can pass
+a `GenerateContentConfig` to the `GenerateContent` method.
+
+```java
+package <your package name>;
+
+import com.google.common.collect.ImmutableList;
+import com.google.genai.Client;
+import com.google.genai.types.Content;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.GoogleSearch;
+import com.google.genai.types.HarmBlockThreshold;
+import com.google.genai.types.HarmCategory;
+import com.google.genai.types.Part;
+import com.google.genai.types.SafetySetting;
+import com.google.genai.types.ThinkingConfig;
+import com.google.genai.types.Tool;
+
+public class GenerateContentWithConfigs {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ // Sets the safety settings in the config.
+ ImmutableList<SafetySetting> safetySettings =
+ ImmutableList.of(
+ SafetySetting.builder()
+ .category(HarmCategory.Known.HARM_CATEGORY_HATE_SPEECH)
+ .threshold(HarmBlockThreshold.Known.BLOCK_ONLY_HIGH)
+ .build(),
+ SafetySetting.builder()
+ .category(HarmCategory.Known.HARM_CATEGORY_DANGEROUS_CONTENT)
+ .threshold(HarmBlockThreshold.Known.BLOCK_LOW_AND_ABOVE)
+ .build());
+
+ // Sets the system instruction in the config.
+ Content systemInstruction = Content.fromParts(Part.fromText("You are a history teacher."));
+
+ // Sets the Google Search tool in the config.
+ Tool googleSearchTool = Tool.builder().googleSearch(GoogleSearch.builder()).build();
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ // Sets the thinking budget to 0 to disable thinking mode
+ .thinkingConfig(ThinkingConfig.builder().thinkingBudget(0))
+ .candidateCount(1)
+ .maxOutputTokens(1024)
+ .safetySettings(safetySettings)
+ .systemInstruction(systemInstruction)
+ .tools(googleSearchTool)
+ .build();
+
+ GenerateContentResponse response =
+ client.models.generateContent("gemini-2.5-flash", "Tell me the history of LLM", config);
System.out.println("Response: " + response.text());
}
@@ -188,20 +406,20 @@ public class GenerateContentWithFunctionCall {
public static void main(String[] args) throws NoSuchMethodException {
Client client = new Client();
+ // Load the method as a reflected Method object so that it can be
+ // automatically executed on the client side.
Method method =
GenerateContentWithFunctionCall.class.getMethod(
"getCurrentWeather", String.class, String.class);
GenerateContentConfig config =
GenerateContentConfig.builder()
- .tools(
- ImmutableList.of(
- Tool.builder().functions(ImmutableList.of(method)).build()))
+ .tools(Tool.builder().functions(method))
.build();
GenerateContentResponse response =
client.models.generateContent(
- "gemini-2.0-flash-001",
+ "gemini-2.5-flash",
"What is the weather in Vancouver?",
config);
@@ -213,7 +431,7 @@ public class GenerateContentWithFunctionCall {
}
```
-#### Stream Generated Content
+##### Stream Generated Content
To get a streamed response, you can use the `generateContentStream` method:
```java
@@ -225,13 +443,11 @@ import com.google.genai.types.GenerateContentResponse;
public class StreamGeneration {
public static void main(String[] args) {
- // Instantiate the client using Vertex API. The client gets the project and location from the
- // environment variables `GOOGLE_CLOUD_PROJECT` and `GOOGLE_CLOUD_LOCATION`.
- Client client = Client.builder().vertexAI(true).build();
+ Client client = new Client();
ResponseStream<GenerateContentResponse> responseStream =
client.models.generateContentStream(
- "gemini-2.0-flash-001", "Tell me a story in 300 words.", null);
+ "gemini-2.5-flash", "Tell me a story in 300 words.", null);
System.out.println("Streaming response: ");
for (GenerateContentResponse res : responseStream) {
@@ -245,7 +461,7 @@ public class StreamGeneration {
}
```
-#### Async Generate Content
+##### Async Generate Content
To get a response asynchronously, you can use the `generateContent` method from
the `client.async.models` namespace.
@@ -258,12 +474,11 @@ import java.util.concurrent.CompletableFuture;
public class GenerateContentAsync {
public static void main(String[] args) {
- // Instantiates the client using Gemini API, and sets the API key in the builder.
- Client client = Client.builder().apiKey("your-api-key").build();
+ Client client = new Client();
CompletableFuture<GenerateContentResponse> responseFuture =
client.async.models.generateContent(
- "gemini-2.0-flash-001", "Introduce Google AI Studio.", null);
+ "gemini-2.5-flash", "Introduce Google AI Studio.", null);
responseFuture
.thenAccept(
@@ -275,104 +490,489 @@ public class GenerateContentAsync {
}
```
-#### Generate Content with extra configs
-To set configurations like System Instructions and Safety Settings, you can pass
-a `GenerateContentConfig` to the `GenerateContent` method.
+##### Generate Content with JSON response schema
+To get a response in JSON by passing in a response schema to the
+`GenerateContent` API.
```java
package <your package name>;
import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
import com.google.genai.Client;
-import com.google.genai.types.Content;
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.GenerateContentResponse;
-import com.google.genai.types.GoogleSearch;
-import com.google.genai.types.HarmBlockThreshold;
-import com.google.genai.types.HarmCategory;
-import com.google.genai.types.Part;
-import com.google.genai.types.SafetySetting;
-import com.google.genai.types.Tool;
+import com.google.genai.types.Schema;
+import com.google.genai.types.Type;
-public class GenerateContentWithConfigs {
+public class GenerateContentWithSchema {
public static void main(String[] args) {
Client client = new Client();
- // Sets the safety settings in the config.
- ImmutableList safetySettings =
- ImmutableList.of(
- SafetySetting.builder()
- .category(HarmCategory.Known.HARM_CATEGORY_HATE_SPEECH)
- .threshold(HarmBlockThreshold.Known.BLOCK_ONLY_HIGH)
- .build(),
- SafetySetting.builder()
- .category(HarmCategory.Known.HARM_CATEGORY_DANGEROUS_CONTENT)
- .threshold(HarmBlockThreshold.Known.BLOCK_LOW_AND_ABOVE)
- .build());
-
- // Sets the system instruction in the config.
- Content systemInstruction = Content.fromParts(Part.fromText("You are a history teacher."));
-
- // Sets the Google Search tool in the config.
- Tool googleSearchTool = Tool.builder().googleSearch(GoogleSearch.builder().build()).build();
-
+ // Define the schema for the response, in Json format.
+ ImmutableMap schema = ImmutableMap.of(
+ "type", "object",
+ "properties", ImmutableMap.of(
+ "recipe_name", ImmutableMap.of("type", "string"),
+ "ingredients", ImmutableMap.of(
+ "type", "array",
+ "items", ImmutableMap.of("type", "string")
+ )
+ ),
+ "required", ImmutableList.of("recipe_name", "ingredients")
+ );
+
+ // Set the response schema in GenerateContentConfig
GenerateContentConfig config =
GenerateContentConfig.builder()
+ .responseMimeType("application/json")
.candidateCount(1)
- .maxOutputTokens(1024)
- .safetySettings(safetySettings)
- .systemInstruction(systemInstruction)
- .tools(ImmutableList.of(googleSearchTool))
+ .responseSchema(schema)
.build();
GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", "Tell me the history of LLM", config);
+        client.models.generateContent("gemini-2.5-flash", "Give me a cookie recipe.", config);
System.out.println("Response: " + response.text());
}
}
```
-#### Generate Content with JSON response schema
-To get a response in JSON by passing in a response schema to the
-`GenerateContent` API.
+#### Count Tokens and Compute Tokens
+
+The `countTokens` method allows you to calculate the number of tokens your
+prompt will use before sending it to the model, helping you manage costs and
+stay within the context window.
```java
package ;
-import com.google.common.collect.ImmutableMap;
import com.google.genai.Client;
-import com.google.genai.types.GenerateContentConfig;
-import com.google.genai.types.GenerateContentResponse;
-import com.google.genai.types.Schema;
-import com.google.genai.types.Type;
+import com.google.genai.types.CountTokensResponse;
-public class GenerateContentWithSchema {
+public class CountTokens {
public static void main(String[] args) {
Client client = new Client();
- Schema schema =
- Schema.builder()
- .type("object")
- .properties(
- ImmutableMap.of(
- "name", Schema.builder().type(Type.Known.STRING).description("Your Name").build()))
+ CountTokensResponse response =
+ client.models.countTokens("gemini-2.5-flash", "What is your name?", null);
+
+ System.out.println("Count tokens response: " + response);
+ }
+}
+```
+
+The `computeTokens` method returns the Tokens Info that contains tokens and
+token IDs given your prompt. This method is only supported in Vertex AI.
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.ComputeTokensResponse;
+
+public class ComputeTokens {
+ public static void main(String[] args) {
+ Client client = Client.builder().vertexAI(true).build();
+
+ ComputeTokensResponse response =
+ client.models.computeTokens("gemini-2.5-flash", "What is your name?", null);
+
+ System.out.println("Compute tokens response: " + response);
+ }
+}
+```
+
+#### Embed Content
+
+The `embedContent` method allows you to generate embeddings for words, phrases,
+sentences, and code, as well as multimodal content like images or videos via Vertex AI.
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.EmbedContentConfig;
+import com.google.genai.types.EmbedContentResponse;
+
+public class EmbedContent {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ EmbedContentResponse response =
+ client.models.embedContent("gemini-embedding-001", "why is the sky blue?", null);
+
+ System.out.println("Embedding response: " + response);
+
+ // Multimodal embedding with Vertex AI
+ Client vertexClient = Client.builder().vertexAI(true).build();
+ EmbedContentConfig config =
+ EmbedContentConfig.builder()
+ .outputDimensionality(10)
+ .title("test_title")
+ .taskType("RETRIEVAL_DOCUMENT")
.build();
- GenerateContentConfig config =
- GenerateContentConfig.builder()
- .responseMimeType("application/json")
- .candidateCount(1)
- .responseSchema(schema)
+
+ EmbedContentResponse mmResponse =
+ vertexClient.models.embedContent(
+ "gemini-embedding-2-exp-11-2025",
+ Content.fromParts(
+ Part.fromText("Hello"),
+ Part.fromUri("gs://cloud-samples-data/generative-ai/image/a-man-and-a-dog.png", "image/png")),
+ config);
+ System.out.println("Multimodal embedding response: " + mmResponse);
+ }
+}
+```
+
+### Imagen
+
+Imagen is a text-to-image GenAI service.
+
+#### Generate Images
+
+The `generateImages` method helps you create high-quality, unique images given a
+text prompt.
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateImagesConfig;
+import com.google.genai.types.GenerateImagesResponse;
+import com.google.genai.types.Image;
+
+public class GenerateImages {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ GenerateImagesConfig config =
+ GenerateImagesConfig.builder()
+ .numberOfImages(1)
+ .outputMimeType("image/jpeg")
+ .includeSafetyAttributes(true)
.build();
- GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", "Tell me your name", config);
+ GenerateImagesResponse response =
+ client.models.generateImages(
+ "imagen-3.0-generate-002", "Robot holding a red skateboard", config);
- System.out.println("Response: " + response.text());
+    if (response.images().isEmpty()) {
+      System.out.println("Unable to generate images.");
+    }
+    System.out.println("Generated " + response.images().size() + " images.");
+    Image generatedImage = response.images().get(0);
}
}
```
+#### Upscale Image
+
+The `upscaleImage` method allows you to upscale an image. This feature is only
+supported in Vertex AI.
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.UpscaleImageConfig;
+import com.google.genai.types.UpscaleImageResponse;
+
+public class UpscaleImage {
+ public static void main(String[] args) {
+ Client client = Client.builder().vertexAI(true).build();
+
+ Image image = Image.fromFile("path/to/your/image");
+
+ UpscaleImageConfig config =
+ UpscaleImageConfig.builder()
+ .outputMimeType("image/jpeg")
+ .enhanceInputImage(true)
+ .imagePreservationFactor(0.6f)
+ .build();
+
+ UpscaleImageResponse response =
+ client.models.upscaleImage("imagen-3.0-generate-002", image, "x2", config);
+
+ response.generatedImages().ifPresent(
+ images -> {
+ Image upscaledImage = images.get(0).image().orElse(null);
+ // Do something with the upscaled image.
+ }
+ );
+ }
+}
+```
+
+#### Edit Image
+
+The `editImage` method lets you edit an image. You can input reference images
+(ex. mask reference for inpainting, or style reference for style transfer) in
+addition to a text prompt to guide the editing.
+
+This feature uses a different model than `generateImages` and `upscaleImage`. It
+is only supported in Vertex AI.
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.EditImageConfig;
+import com.google.genai.types.EditImageResponse;
+import com.google.genai.types.EditMode;
+import com.google.genai.types.Image;
+import com.google.genai.types.MaskReferenceConfig;
+import com.google.genai.types.MaskReferenceImage;
+import com.google.genai.types.MaskReferenceMode;
+import com.google.genai.types.RawReferenceImage;
+import com.google.genai.types.ReferenceImage;
+import java.util.ArrayList;
+
+public class EditImage {
+ public static void main(String[] args) {
+ Client client = Client.builder().vertexAI(true).build();
+
+ Image image = Image.fromFile("path/to/your/image");
+
+ // Edit image with a mask.
+ EditImageConfig config =
+ EditImageConfig.builder()
+ .editMode(EditMode.Known.EDIT_MODE_INPAINT_INSERTION)
+ .numberOfImages(1)
+ .outputMimeType("image/jpeg")
+ .build();
+
+ ArrayList referenceImages = new ArrayList<>();
+ RawReferenceImage rawReferenceImage =
+ RawReferenceImage.builder().referenceImage(image).referenceId(1).build();
+ referenceImages.add(rawReferenceImage);
+
+ MaskReferenceImage maskReferenceImage =
+ MaskReferenceImage.builder()
+ .referenceId(2)
+ .config(
+ MaskReferenceConfig.builder()
+ .maskMode(MaskReferenceMode.Known.MASK_MODE_BACKGROUND)
+ .maskDilation(0.0f))
+ .build();
+ referenceImages.add(maskReferenceImage);
+
+ EditImageResponse response =
+ client.models.editImage(
+ "imagen-3.0-capability-001", "Sunlight and clear sky", referenceImages, config);
+
+ response.generatedImages().ifPresent(
+ images -> {
+ Image editedImage = images.get(0).image().orElse(null);
+ // Do something with the edited image.
+ }
+ );
+ }
+}
+```
+
+### Veo
+
+Veo is a video generation GenAI service.
+
+#### Generate Videos (Text to Video)
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.Video;
+
+public class GenerateVideosWithText {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ GenerateVideosConfig config =
+ GenerateVideosConfig.builder()
+ .numberOfVideos(1)
+ .enhancePrompt(true)
+ .durationSeconds(5)
+ .build();
+
+ // generateVideos returns an operation
+ GenerateVideosOperation operation =
+ client.models.generateVideos(
+ "veo-2.0-generate-001", "A neon hologram of a cat driving at top speed", null, config);
+
+ // When the operation hasn't been finished, operation.done() is empty
+ while (!operation.done().isPresent()) {
+ try {
+ System.out.println("Waiting for operation to complete...");
+ Thread.sleep(10000);
+ // Sleep for 10 seconds and check the operation again
+ operation = client.operations.getVideosOperation(operation, null);
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ operation.response().ifPresent(
+ response -> {
+ response.generatedVideos().ifPresent(
+ videos -> {
+ System.out.println("Generated " + videos.size() + " videos.");
+ Video video = videos.get(0).video().orElse(null);
+ // Do something with the generated video
+ }
+ );
+ }
+ );
+ }
+}
+```
+
+#### Generate Videos (Image to Video)
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.Image;
+import com.google.genai.types.Video;
+
+public class GenerateVideosWithImage {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ Image image = Image.fromFile("path/to/your/image");
+
+ GenerateVideosConfig config =
+ GenerateVideosConfig.builder()
+ .numberOfVideos(1)
+ .enhancePrompt(true)
+ .durationSeconds(5)
+ .build();
+
+ // generateVideos returns an operation
+ GenerateVideosOperation operation =
+ client.models.generateVideos(
+ "veo-2.0-generate-001",
+ "Night sky",
+ image,
+ config);
+
+ // When the operation hasn't been finished, operation.done() is empty
+ while (!operation.done().isPresent()) {
+ try {
+ System.out.println("Waiting for operation to complete...");
+ Thread.sleep(10000);
+ // Sleep for 10 seconds and check the operation again
+ operation = client.operations.getVideosOperation(operation, null);
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ operation.response().ifPresent(
+ response -> {
+ response.generatedVideos().ifPresent(
+ videos -> {
+ System.out.println("Generated " + videos.size() + " videos.");
+ Video video = videos.get(0).video().orElse(null);
+ // Do something with the generated video
+ }
+ );
+ }
+ );
+ }
+}
+```
+
+### Files API
+
+Gemini models support various input data types, including text, images, and
+audio. The Files API allows you to upload and manage these media files for use
+with Gemini models. **This feature is exclusively supported by the Gemini API**.
+
+#### Usage info
+You can use the Files API to upload and interact with media files. The Files API
+lets you store up to 20 GB of files per project, with a per-file maximum size of
+2 GB. Files are stored for 48 hours. During that time, you can use the API to
+get metadata about the files, but you can't download the files. The Files API is
+available at no cost in all regions where the Gemini API is available.
+
+The basic operations are:
+
+1. **Upload**: You can use the Files API to upload a media file. Always use
+the Files API when the total request size (including the files, text prompt,
+system instructions, etc.) is larger than 20 MB.
+
+2. **Get**: You can verify that the API successfully stored the uploaded file
+and get its metadata.
+
+3. **List**: You can upload multiple files using the Files API. The following
+code gets a list of all the files uploaded.
+
+4. **Delete**: Files are automatically deleted after 48 hours. You can also
+manually delete an uploaded file:
+
+#### Sample usage
+
+```java
+package ;
+
+import com.google.genai.Client;
+import com.google.genai.errors.GenAiIOException;
+import com.google.genai.types.Content;
+import com.google.genai.types.DeleteFileResponse;
+import com.google.genai.types.File;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.ListFilesConfig;
+import com.google.genai.types.Part;
+import com.google.genai.types.UploadFileConfig;
+
+/** An example of how to use the Files module to upload, retrieve, list, and delete files. */
+public final class FileOperations {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ // Upload a file to the API.
+ try {
+ File file =
+ client.files.upload(
+ "path/to/your/file.pdf",
+ UploadFileConfig.builder().mimeType("application/pdf").build());
+
+ // Use the uploaded file in the generateContent
+ Content content =
+ Content.fromParts(
+              Part.fromText("Summarize this pdf."),
+ Part.fromUri(file.name().get(), file.mimeType().get()));
+ GenerateContentResponse response =
+ client.models.generateContent("gemini-2.5-flash", content, null);
+
+ // Get the uploaded file.
+ File retrievedFile = client.files.get(file.name().get(), null);
+
+ // List all files.
+ System.out.println("List files: ");
+ for (File f : client.files.list(ListFilesConfig.builder().pageSize(10).build())) {
+ System.out.println("File name: " + f.name().get());
+ }
+
+ // Delete the uploaded file.
+ client.files.delete(file.name().get(), null);
+
+ } catch (GenAiIOException e) {
+ System.out.println("An error occurred while uploading the file: " + e.getMessage());
+ }
+ }
+}
+```
+
+
## Versioning
This library follows [Semantic Versioning](http://semver.org/).
diff --git a/examples/pom.xml b/examples/pom.xml
index 95ba06f36db..18b400fef78 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -5,7 +5,7 @@
com.google.genai.examplesgoogle-genai-examples
- 1.6.0-SNAPSHOT
+ 1.54.0-SNAPSHOTgoogle-genai-examples
@@ -13,11 +13,10 @@
1.81.8
- 1.6.0-SNAPSHOT
+ 1.54.0-SNAPSHOT
- org.apache.maven.plugins
@@ -29,8 +28,39 @@
+
+ org.codehaus.mojo
+ exec-maven-plugin
+ 3.5.0
+
+ false
+
+
+
+ org.graalvm.buildtools
+ native-maven-plugin
+ 0.11.1
+ true
+
+
+
+ build
+ test
+
+ package
+
+
+
+
+ -Ob
+ --no-fallback
+ --verbose
+
+ com.google.genai.examples.GenerateContent
+ genai-example-app
+
+
-
@@ -39,5 +69,15 @@
google-genai${google-genai.version}
+
+ org.slf4j
+ jul-to-slf4j
+ 2.0.17
+
+
+ org.slf4j
+ slf4j-simple
+ 2.0.17
+
diff --git a/examples/src/main/java/com/google/genai/examples/BatchInlinedRequests.java b/examples/src/main/java/com/google/genai/examples/BatchInlinedRequests.java
new file mode 100644
index 00000000000..7e2eee4f361
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/BatchInlinedRequests.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.BatchInlinedRequests"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.genai.Client;
+import com.google.genai.types.BatchJob;
+import com.google.genai.types.BatchJobSource;
+import com.google.genai.types.Content;
+import com.google.genai.types.CreateBatchJobConfig;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.InlinedRequest;
+import com.google.genai.types.Part;
+
+/** An example of creating a batch job with inlined requests. */
+public final class BatchInlinedRequests {
+
+ public static void main(String[] args) {
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Inlined requests are not supported for Vertex AI backend.");
+ return;
+ } else {
+ System.out.println("Calling GeminiAPI Backend...");
+ }
+
+ InlinedRequest request1 =
+ InlinedRequest.builder()
+ .contents(Content.builder().parts(Part.fromText("Tell me a one-sentence joke.")))
+ .config(
+ GenerateContentConfig.builder()
+ .systemInstruction(
+ Content.builder()
+ .parts(
+ Part.fromText(
+ "You are a funny comedian. Always respond with humor and"
+ + " wit.")))
+ .temperature(0.5f))
+ .build();
+
+ InlinedRequest request2 =
+ InlinedRequest.builder()
+ .contents(Content.builder().parts(Part.fromText("Why is the sky blue?")))
+ .config(
+ GenerateContentConfig.builder()
+ .systemInstruction(
+ Content.builder()
+ .parts(
+ Part.fromText(
+ "You are a helpful science teacher. Explain complex concepts in"
+ + " simple terms.")))
+ .temperature(0.5f))
+ .build();
+
+ BatchJobSource batchJobSource =
+ BatchJobSource.builder().inlinedRequests(ImmutableList.of(request1, request2)).build();
+
+ CreateBatchJobConfig config =
+ CreateBatchJobConfig.builder().displayName("inlined-requests-job-1").build();
+
+ BatchJob batchJob =
+ client.batches.create(Constants.GEMINI_MODEL_NAME, batchJobSource, config);
+
+ System.out.println("Created batch job: " + batchJob.name().get());
+ }
+}
diff --git a/examples/src/main/java/com/google/genai/examples/BatchManagement.java b/examples/src/main/java/com/google/genai/examples/BatchManagement.java
new file mode 100644
index 00000000000..f200a1ee6d6
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/BatchManagement.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.BatchManagement"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.BatchJob;
+import com.google.genai.types.BatchJobDestination;
+import com.google.genai.types.BatchJobSource;
+import com.google.genai.types.Content;
+import com.google.genai.types.CreateBatchJobConfig;
+import com.google.genai.types.InlinedRequest;
+import com.google.genai.types.ListBatchJobsConfig;
+import com.google.genai.types.Part;
+
+/** An example of using the Unified Gen AI Java SDK to do operations on batch jobs. */
+public final class BatchManagement {
+
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ // Create a batch job.
+ BatchJobSource batchJobSource =
+ BatchJobSource.builder()
+ .gcsUri("gs://unified-genai-tests/batches/input/generate_content_requests.jsonl")
+ .format("jsonl")
+ .build();
+ CreateBatchJobConfig config =
+ CreateBatchJobConfig.builder()
+ .displayName("summarize the pdf")
+ .dest(
+ BatchJobDestination.builder()
+ .gcsUri("gs://unified-genai-tests/batches/output")
+ .format("jsonl"))
+ .build();
+ BatchJob batchJob1 = client.batches.create(modelId, batchJobSource, config);
+ System.out.println("Created batch job: " + batchJob1);
+ // Get the batch job by name.
+ BatchJob batchJob2 = client.batches.get(batchJob1.name().get(), null);
+ System.out.println("Get batch job: " + batchJob2);
+ // Cancel the batch job.
+ client.batches.cancel(batchJob1.name().get(), null);
+ System.out.println("Cancelled batch job: " + batchJob1.name().get());
+ } else {
+ System.out.println("Using Gemini Developer API");
+ // Create a batch job.
+ BatchJobSource batchJobSource =
+ BatchJobSource.builder()
+ .inlinedRequests(
+ InlinedRequest.builder()
+ .contents(Content.builder().parts(Part.fromText("Hello!"))))
+ .build();
+ CreateBatchJobConfig config =
+ CreateBatchJobConfig.builder().displayName("test-batch-job-java").build();
+ BatchJob batchJob1 = client.batches.create(modelId, batchJobSource, config);
+ System.out.println("Created batch job: " + batchJob1);
+ // Get the batch job by name.
+ BatchJob batchJob2 = client.batches.get(batchJob1.name().get(), null);
+ System.out.println("Get batch job: " + batchJob2);
+ // Cancel the batch job.
+ client.batches.cancel(batchJob1.name().get(), null);
+ System.out.println("Cancelled batch job: " + batchJob1.name().get());
+ }
+
+ // List all batch jobs.
+ System.out.println("List batch jobs resource names: ");
+ for (BatchJob b :
+ client.batches.list(ListBatchJobsConfig.builder().pageSize(5).build()).page()) {
+ System.out.println(b.name().get());
+ System.out.println(b.state().get());
+ }
+
+ }
+
+ private BatchManagement() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java b/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java
new file mode 100644
index 00000000000..df00b9b3d15
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.BatchManagementAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.AsyncPager;
+import com.google.genai.Client;
+import com.google.genai.types.BatchJob;
+import com.google.genai.types.BatchJobDestination;
+import com.google.genai.types.BatchJobSource;
+import com.google.genai.types.Content;
+import com.google.genai.types.CreateBatchJobConfig;
+import com.google.genai.types.InlinedRequest;
+import com.google.genai.types.ListBatchJobsConfig;
+import com.google.genai.types.Part;
+import java.util.concurrent.CompletableFuture;
+
+/** An example of using the Unified Gen AI Java SDK to do async operations on batch jobs. */
+public final class BatchManagementAsync {
+
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ // Create a batch job.
+ BatchJobSource batchJobSource =
+ BatchJobSource.builder()
+ .bigqueryUri(
+ "bq://vertex-sdk-dev.unified_genai_tests_batches.generate_content_requests")
+ .format("bigquery")
+ .build();
+ CreateBatchJobConfig config =
+ CreateBatchJobConfig.builder()
+ .displayName("test batch")
+ .dest(
+ BatchJobDestination.builder()
+ .bigqueryUri(
+ "bq://vertex-sdk-dev.unified_genai_tests_batches.generate_content_output")
+ .format("bigquery"))
+ .build();
+ CompletableFuture finalFuture =
+ client
+ .async
+ .batches
+ .create(modelId, batchJobSource, config)
+ .thenCompose(
+ batchJob1 -> {
+ System.out.println("Created batch job: " + batchJob1.name());
+ CompletableFuture batchJob2Future =
+ client.async.batches.get(batchJob1.name().get(), null);
+
+ return batchJob2Future.thenCompose(
+ batchJob2 -> {
+ System.out.println("Get batch job: " + batchJob2.name());
+ // Cancel the batch job. This is now nested, so it happens after get
+ // completes.
+ return client
+ .async
+ .batches
+ .cancel(batchJob1.name().get(), null)
+ .thenAccept(
+ cancelResponse -> {
+ System.out.println(
+ "Successfully initiated cancellation for batch job: "
+ + batchJob1.name());
+ });
+ });
+ });
+ finalFuture.join();
+ System.out.println("All batch job operations completed.");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ // Create a batch job.
+ BatchJobSource batchJobSource =
+ BatchJobSource.builder()
+ .inlinedRequests(
+ InlinedRequest.builder()
+ .contents(Content.builder().parts(Part.fromText("Hello!"))))
+ .build();
+ CreateBatchJobConfig config =
+ CreateBatchJobConfig.builder().displayName("test-batch-job-java").build();
+ CompletableFuture finalFuture =
+ client
+ .async
+ .batches
+ .create(modelId, batchJobSource, config)
+ .thenCompose(
+ batchJob1 -> {
+ System.out.println("Created batch job: " + batchJob1.name());
+ CompletableFuture batchJob2Future =
+ client.async.batches.get(batchJob1.name().get(), null);
+
+ return batchJob2Future.thenCompose(
+ batchJob2 -> {
+ System.out.println("Get batch job: " + batchJob2.name());
+ // Cancel the batch job. This is now nested, so it happens after get
+ // completes.
+ return client
+ .async
+ .batches
+ .cancel(batchJob1.name().get(), null)
+ .thenAccept(
+ cancelResponse -> {
+ System.out.println(
+ "Successfully initiated cancellation for batch job: "
+ + batchJob1.name());
+ });
+ });
+ });
+ finalFuture.join();
+ System.out.println("All batch job operations completed.");
+ }
+ // List all batch jobs.
+ CompletableFuture> asyncPagerFuture =
+ client.async.batches.list(ListBatchJobsConfig.builder().pageSize(10).build());
+ asyncPagerFuture
+ .thenCompose(
+ asyncPager -> {
+ System.out.println("List all batch job names: ");
+ return asyncPager.forEach(
+ item -> System.out.println("Batch job name: " + item.name().get()));
+ })
+ .join();
+ }
+
+ private BatchManagementAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java b/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java
index bcd6d30b997..ad37e1ffe31 100644
--- a/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java
+++ b/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java
@@ -50,7 +50,6 @@
import com.google.genai.types.ListCachedContentsConfig;
import com.google.genai.types.Part;
import com.google.genai.types.UpdateCachedContentConfig;
-
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
@@ -63,9 +62,11 @@
public final class CachedContentOperations {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java b/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java
index 223c927a64a..62f16eda543 100644
--- a/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java
@@ -66,11 +66,11 @@
public final class CachedContentOperationsAsync {
public static void main(String[] args) {
- String modelId;
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
} else {
- modelId = "gemini-2.0-flash-001";
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java b/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java
index 584a4c1e76a..618d6acb360 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java
@@ -60,9 +60,11 @@ public static Integer divideTwoIntegers(int numerator, int denominator) {
}
public static void main(String[] args) throws NoSuchMethodException {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java
index 83a2b4b8af0..f79b0a38db6 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to create a chat session with history. */
public final class ChatWithHistory {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java
index aa9da312e55..24c5b17a71a 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java
@@ -50,9 +50,11 @@
/** An example of using the Unified Gen AI Java SDK to create an async chat session with history. */
public final class ChatWithHistoryAsync {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java
index 89112f774fb..427c3840b80 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java
@@ -54,9 +54,11 @@
*/
public final class ChatWithHistoryAsyncStreaming {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreamingFunctionCall.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreamingFunctionCall.java
new file mode 100644
index 00000000000..489051775c6
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreamingFunctionCall.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.ChatWithHistoryAsyncStreamingFunctionCall"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.genai.AsyncChat;
+import com.google.genai.Client;
+import com.google.genai.ResponseStream;
+import com.google.genai.types.Content;
+import com.google.genai.types.FunctionCallingConfig;
+import com.google.genai.types.FunctionResponse;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Part;
+import com.google.genai.types.Tool;
+import com.google.genai.types.ToolConfig;
+import java.lang.reflect.Method;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to create an async streaming chat session with
+ * history.
+ */
+public final class ChatWithHistoryAsyncStreamingFunctionCall {
+ /** A callable function to get the current weather. */
+ public static String getCurrentWeather(String location) {
+ return "The weather in " + location + " is " + "very nice.";
+ }
+
+ public static void main(String[] args) throws NoSuchMethodException {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_3_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API does not support streaming function calling.");
+ return;
+ }
+
+ // Add the methods as callable functions to the tool.
+ Method method1 =
+ ChatWithHistoryAsyncStreamingFunctionCall.class.getDeclaredMethod(
+ "getCurrentWeather", String.class);
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ .tools(Tool.builder().functions(method1))
+ .toolConfig(
+ ToolConfig.builder()
+ .functionCallingConfig(
+ FunctionCallingConfig.builder().streamFunctionCallArguments(true)))
+ .build();
+
+ AsyncChat chatSession = client.async.chats.create(modelId, config);
+
+ CompletableFuture> chatResponseFuture =
+ chatSession.sendMessageStream("what is the weather in San Francisco?", null);
+
+ chatResponseFuture
+ .thenAccept(
+ responseStream -> {
+ System.out.println("Streaming response:");
+ // Iterate over the stream and print each response as it arrives.
+ for (GenerateContentResponse response : responseStream) {
+ System.out.print(response.functionCalls());
+ }
+ })
+ .whenComplete(
+ (response, throwable) -> {
+ if (throwable != null) {
+ System.out.println("Chat response future failed: " + throwable.getMessage());
+ }
+ })
+ .join();
+
+ FunctionResponse functionResponse =
+ FunctionResponse.builder()
+ .name("getCurrentWeather")
+ .response(ImmutableMap.of("response", "The weather in San Francisco is very nice."))
+ .build();
+
+ CompletableFuture> chatResponse2Future =
+ chatSession.sendMessageStream(
+ Content.builder()
+ .parts(Part.builder().functionResponse(functionResponse).build())
+ .role("user")
+ .build());
+
+ chatResponse2Future
+ .thenAccept(
+ responseStream -> {
+ System.out.println("\n\nFinal Model Response:");
+ for (GenerateContentResponse response : responseStream) {
+ if (response.text() != null) {
+ System.out.print(response.text());
+ }
+ }
+ })
+ .join();
+
+ // Get the history of the chat session.
+ // History is added after the stream is consumed and includes the aggregated response from the
+ // stream, so chatSession.getHistory(false) here returns 2 items (1 user-model message pair)
+ System.out.println("History: " + chatSession.getHistory(false));
+ }
+
+ private ChatWithHistoryAsyncStreamingFunctionCall() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java
index c7c69e7c40b..54b9be2f62f 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java
@@ -52,9 +52,11 @@
*/
public final class ChatWithHistoryStreaming {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreamingFunctionCall.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreamingFunctionCall.java
new file mode 100644
index 00000000000..1f4d03acb4b
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreamingFunctionCall.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.ChatWithHistoryStreamingFunctionCall"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.genai.Chat;
+import com.google.genai.Client;
+import com.google.genai.ResponseStream;
+import com.google.genai.types.Content;
+import com.google.genai.types.FunctionCallingConfig;
+import com.google.genai.types.FunctionResponse;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Part;
+import com.google.genai.types.Tool;
+import com.google.genai.types.ToolConfig;
+import java.lang.reflect.Method;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to create a chat session and stream the response.
+ */
+public final class ChatWithHistoryStreamingFunctionCall {
+ /** A callable function to get the current weather. */
+ public static String getCurrentWeather(String location) {
+ return "The weather in " + location + " is " + "very nice.";
+ }
+
+ public static void main(String[] args) throws NoSuchMethodException {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_3_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API does not support streaming function calling.");
+ return;
+ }
+
+ // Add the methods as callable functions to the tool.
+ Method method1 =
+ ChatWithHistoryStreamingFunctionCall.class.getDeclaredMethod(
+ "getCurrentWeather", String.class);
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ .tools(Tool.builder().functions(method1))
+ .toolConfig(
+ ToolConfig.builder()
+ .functionCallingConfig(
+ FunctionCallingConfig.builder().streamFunctionCallArguments(true)))
+ .build();
+
+ // Create a chat session.
+ Chat chatSession = client.chats.create(modelId, config);
+
+ ResponseStream responseStream =
+ chatSession.sendMessageStream("what is the weather in San Francisco?", null);
+
+ System.out.println("Streaming response:");
+ for (GenerateContentResponse response : responseStream) {
+ // Iterate over the stream and print each response as it arrives.
+ System.out.print(response.functionCalls());
+ }
+
+ FunctionResponse functionResponse =
+ FunctionResponse.builder()
+ .name("getCurrentWeather")
+ .response(ImmutableMap.of("response", "The weather in San Francisco is very nice."))
+ .build();
+ ResponseStream responseStream2 =
+ chatSession.sendMessageStream(
+ Content.builder()
+ .parts(Part.builder().functionResponse(functionResponse).build())
+ .role("user")
+ .build(),
+ null);
+ System.out.println("Streaming response 2:");
+ for (GenerateContentResponse response : responseStream2) {
+ // Iterate over the stream and print each response as it arrives.
+ System.out.print(response.text());
+ }
+
+ ResponseStream responseStream3 =
+ chatSession.sendMessageStream("Thanks!", null);
+ for (GenerateContentResponse response : responseStream3) {
+ // Iterate over the stream and print each response as it arrives.
+ System.out.print(response.text());
+ }
+
+ // Get the history of the chat session.
+ // History is added after the stream is consumed and includes the aggregated response from the
+ // stream, so chatSession.getHistory(false) here returns 4 items (2 user-model message pairs)
+ System.out.println("History: " + chatSession.getHistory(false));
+ }
+
+ private ChatWithHistoryStreamingFunctionCall() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/ComputeTokens.java b/examples/src/main/java/com/google/genai/examples/ComputeTokens.java
index 38b2d373026..e819816b19e 100644
--- a/examples/src/main/java/com/google/genai/examples/ComputeTokens.java
+++ b/examples/src/main/java/com/google/genai/examples/ComputeTokens.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to compute tokens for simple text input. */
public final class ComputeTokens {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -64,7 +66,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
ComputeTokensResponse response =
diff --git a/examples/src/main/java/com/google/genai/examples/Constants.java b/examples/src/main/java/com/google/genai/examples/Constants.java
new file mode 100644
index 00000000000..9bb51eb960e
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/Constants.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.genai.examples;
+
+/** A final class to hold constants shared across all examples. */
+public final class Constants {
+
+ private Constants() {}
+
+ /** The name of the generative model to be used in the examples. */
+ public static final String GEMINI_MODEL_NAME = "gemini-2.5-flash";
+
+ /** The name of the gemini 3 model to be used in the examples. */
+ public static final String GEMINI_3_MODEL_NAME = "gemini-3-pro-preview";
+
+ /** The name of the live model to be used in the examples. */
+ public static final String GEMINI_LIVE_MODEL_NAME = "gemini-live-2.5-flash";
+
+ /** The name of the preview live model to be used in the examples. */
+ public static final String GEMINI_LIVE_MODEL_NAME_PREVIEW =
+ "gemini-2.5-flash-native-audio-preview-09-2025";
+
+ /** The name of the image generation model to be used in the examples. */
+ public static final String GEMINI_IMAGE_GENERATION_MODEL_NAME = "gemini-2.5-flash-image";
+
+ /** The name of the Imagen generate model to be used in the examples. */
+ public static final String IMAGEN_GENERATE_MODEL_NAME = "imagen-4.0-generate-001";
+
+ /** The name of the Imagen model to be used for image editing in the examples. */
+ public static final String IMAGEN_CAPABILITY_MODEL_NAME = "imagen-3.0-capability-001";
+
+ /** The name of the Imagen ingredients model to be used in the examples. */
+ public static final String IMAGEN_INGREDIENTS_MODEL_NAME = "imagen-4.0-ingredients-preview";
+
+ /** The name of the Virtual try-on model to be used in the examples. */
+ public static final String VIRTUAL_TRY_ON_MODEL_NAME = "virtual-try-on-001";
+
+ /** The name of the segment image model to be used in the examples. */
+ public static final String SEGMENT_IMAGE_MODEL_NAME = "image-segmentation-001";
+
+ /** The name of the Veo model to be used in the examples. */
+ public static final String VEO_MODEL_NAME = "veo-3.1-generate-preview";
+
+ /** The name of the embedding model to be used in the examples. */
+ public static final String EMBEDDING_MODEL_NAME = "text-embedding-004";
+
+ /** The name of the vertex multimodal embedding model to be used in the examples. */
+ public static final String VERTEX_MULTIMODAL_EMBEDDING_MODEL_NAME =
+ "gemini-embedding-2-exp-11-2025";
+
+ /** The file path to be used in the files operations examples. */
+ public static final String UPLOAD_FILE_PATH = "./resources/test.txt";
+}
diff --git a/examples/src/main/java/com/google/genai/examples/CountTokens.java b/examples/src/main/java/com/google/genai/examples/CountTokens.java
index 5a6cfa28bb5..1e80883700c 100644
--- a/examples/src/main/java/com/google/genai/examples/CountTokens.java
+++ b/examples/src/main/java/com/google/genai/examples/CountTokens.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to count tokens for simple text input. */
public final class CountTokens {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java b/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java
index 770dbcbcbcd..1ac426d41c3 100644
--- a/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java
+++ b/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java
@@ -54,9 +54,11 @@
*/
public final class CountTokensWithConfigs {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,7 +74,10 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println(
+ "Gemini Developer API is not supported for this example since system instruction is not"
+ + " supported.");
+ System.exit(0);
}
// Sets the system instruction in the config.
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageAsync.java b/examples/src/main/java/com/google/genai/examples/EditImageAsync.java
index 81163a3937c..0761645158d 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageAsync.java
@@ -50,17 +50,17 @@
import com.google.genai.types.MaskReferenceMode;
import com.google.genai.types.RawReferenceImage;
import com.google.genai.types.ReferenceImage;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.CompletableFuture;
-import org.apache.http.HttpException;
/** An example of using the Unified Gen AI Java SDK to edit an image asynchronously. */
public final class EditImageAsync {
- public static void main(String[] args) throws IOException, HttpException {
- String modelId = "imagen-3.0-capability-001";
+ public static void main(String[] args) {
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -76,7 +76,8 @@ public static void main(String[] args) throws IOException, HttpException {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageContentReference.java b/examples/src/main/java/com/google/genai/examples/EditImageContentReference.java
new file mode 100644
index 00000000000..2a66cfdc520
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/EditImageContentReference.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.EditImageContentReference"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.ContentReferenceImage;
+import com.google.genai.types.EditImageConfig;
+import com.google.genai.types.EditImageResponse;
+import com.google.genai.types.Image;
+import com.google.genai.types.ReferenceImage;
+import com.google.genai.types.StyleReferenceConfig;
+import com.google.genai.types.StyleReferenceImage;
+import java.util.ArrayList;
+
+/** An example of using the Unified Gen AI Java SDK to edit an image (Mask reference). */
+public final class EditImageContentReference {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_INGREDIENTS_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ EditImageConfig editImageConfig =
+ EditImageConfig.builder().numberOfImages(1).outputMimeType("image/jpeg").build();
+
+ ArrayList referenceImages = new ArrayList<>();
+ Image dogImage = Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/dog.jpg").build();
+ ContentReferenceImage contentReferenceImage =
+ ContentReferenceImage.builder().referenceImage(dogImage).referenceId(1).build();
+ referenceImages.add(contentReferenceImage);
+
+ Image cyberpunkImage =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/cyberpunk.jpg").build();
+ StyleReferenceImage styleReferenceImage =
+ StyleReferenceImage.builder()
+ .referenceId(2)
+ .referenceImage(cyberpunkImage)
+ .config(StyleReferenceConfig.builder().styleDescription("cyberpunk style").build())
+ .build();
+ referenceImages.add(styleReferenceImage);
+
+ EditImageResponse editImageResponse =
+ client.models.editImage(
+ modelId,
+ "Dog in [1] sleeping on the ground at the bottom of the image with the cyberpunk city"
+ + " landscape in [2] in the background visible on the side of the mug.",
+ referenceImages,
+ editImageConfig);
+
+ Image editedImage = editImageResponse.generatedImages().get().get(0).image().get();
+ // Do something with editedImage.
+ }
+
+ private EditImageContentReference() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java b/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java
index 7955580d4c4..81842b18500 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java
@@ -55,9 +55,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Control reference). */
public final class EditImageControlReference {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -73,7 +75,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java b/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java
index 3b832a4ff60..e0044276578 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java
@@ -57,9 +57,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Mask reference). */
public final class EditImageMaskReference {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -75,7 +77,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java b/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java
index 021e5702495..0e69cf8754b 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java
@@ -54,9 +54,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Style transfer). */
public final class EditImageStyleTransfer {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,7 +74,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java b/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java
index 897dcc85635..0ba00a64f2f 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java
@@ -55,9 +55,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Subject reference). */
public final class EditImageSubjectReference {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -73,7 +75,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EmbedContent.java b/examples/src/main/java/com/google/genai/examples/EmbedContent.java
index 2886f90929a..2cf4cba131f 100644
--- a/examples/src/main/java/com/google/genai/examples/EmbedContent.java
+++ b/examples/src/main/java/com/google/genai/examples/EmbedContent.java
@@ -41,14 +41,20 @@
package com.google.genai.examples;
import com.google.genai.Client;
+import com.google.genai.types.Content;
import com.google.genai.types.EmbedContentResponse;
+import com.google.genai.types.FileData;
+import com.google.genai.types.Part;
+import java.util.Arrays;
/** An example of using the Unified Gen AI Java SDK to embed content. */
public final class EmbedContent {
public static void main(String[] args) {
- String modelId = "text-embedding-004";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.EMBEDDING_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -71,6 +77,24 @@ public static void main(String[] args) {
client.models.embedContent(modelId, "why is the sky blue?", null);
System.out.println("Embedding response: " + response);
+
+ if (client.vertexAI()) {
+ System.out.println("Embed content with GCS image example.");
+ Part textPart = Part.builder().text("What is in this image?").build();
+ Part imagePart =
+ Part.builder()
+ .fileData(
+ FileData.builder()
+ .fileUri("gs://cloud-samples-data/generative-ai/image/a-man-and-a-dog.png")
+ .mimeType("image/png")
+ .build())
+ .build();
+ Content content = Content.builder().parts(Arrays.asList(textPart, imagePart)).build();
+ response =
+ client.models.embedContent(
+ Constants.VERTEX_MULTIMODAL_EMBEDDING_MODEL_NAME, content, null);
+ System.out.println("Embedding response with GCS image: " + response);
+ }
}
private EmbedContent() {}
diff --git a/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java b/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java
index 538f017c19a..2a77865a4ff 100644
--- a/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java
@@ -41,15 +41,21 @@
package com.google.genai.examples;
import com.google.genai.Client;
+import com.google.genai.types.Content;
import com.google.genai.types.EmbedContentResponse;
+import com.google.genai.types.FileData;
+import com.google.genai.types.Part;
+import java.util.Arrays;
import java.util.concurrent.CompletableFuture;
/** An example of using the Unified Gen AI Java SDK to embed content asynchronously. */
public final class EmbedContentAsync {
public static void main(String[] args) {
- String modelId = "text-embedding-004";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.EMBEDDING_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -77,6 +83,28 @@ public static void main(String[] args) {
System.out.println("Async embedding response: " + response);
})
.join();
+
+ // Vertex Multimodal embedding.
+ if (client.vertexAI()) {
+ System.out.println("Embed content with GCS image example.");
+ Part textPart = Part.builder().text("What is in this image?").build();
+ Part imagePart =
+ Part.builder()
+ .fileData(
+ FileData.builder()
+ .fileUri("gs://cloud-samples-data/generative-ai/image/a-man-and-a-dog.png")
+ .mimeType("image/png")
+ .build())
+ .build();
+ Content content = Content.builder().parts(Arrays.asList(textPart, imagePart)).build();
+ responseFuture =
+ client.async.models.embedContent(Constants.VERTEX_MULTIMODAL_EMBEDDING_MODEL_NAME, content, null);
+ responseFuture
+ .thenAccept(
+ response ->
+ System.out.println("Async embedding response with GCS image: " + response))
+ .join();
+ }
}
private EmbedContentAsync() {}
diff --git a/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java b/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java
index 1ffbd18f123..09770d6512d 100644
--- a/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java
+++ b/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java
@@ -50,9 +50,11 @@
/** An example of using the Unified Gen AI Java SDK to embed content with extra config. */
public final class EmbedContentWithConfig {
public static void main(String[] args) {
- String modelId = "text-embedding-004";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.EMBEDDING_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/FileOperations.java b/examples/src/main/java/com/google/genai/examples/FileOperations.java
index 3dc7eae5771..0a54cb0b1a4 100644
--- a/examples/src/main/java/com/google/genai/examples/FileOperations.java
+++ b/examples/src/main/java/com/google/genai/examples/FileOperations.java
@@ -38,7 +38,7 @@
*
mvn clean compile
*
*
mvn exec:java -Dexec.mainClass="com.google.genai.examples.FileOperations"
- * -Dexec.args="./resources/test.txt"
+ * -Dexec.args="path/to/file"
*/
package com.google.genai.examples;
@@ -52,12 +52,12 @@
/** An example of how to use the Files module to upload, retrieve, and delete files. */
public final class FileOperations {
public static void main(String[] args) {
-
- if (args.length == 0) {
- System.out.println("Please provide a file path on the -Dexec.args argument.");
- return;
+ final String filePath;
+ if (args.length != 0) {
+ filePath = args[0];
+ } else {
+ filePath = Constants.UPLOAD_FILE_PATH;
}
- String filePath = args[0];
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
@@ -70,7 +70,8 @@ public static void main(String[] args) {
Client client = new Client();
if (client.vertexAI()) {
- System.out.println("Using Vertex AI");
+ System.out.println("Vertex AI API is not supported for this example.");
+ System.exit(0);
} else {
System.out.println("Using Gemini Developer API");
}
diff --git a/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java b/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java
index 6fc53aba435..f35c7ed057a 100644
--- a/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java
@@ -56,12 +56,12 @@
*/
public final class FileOperationsAsync {
public static void main(String[] args) {
-
- if (args.length == 0) {
- System.out.println("Please provide a file path on the -Dexec.args argument.");
- return;
+ final String filePath;
+ if (args.length != 0) {
+ filePath = args[0];
+ } else {
+ filePath = Constants.UPLOAD_FILE_PATH;
}
- String filePath = args[0];
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
@@ -74,7 +74,8 @@ public static void main(String[] args) {
Client client = new Client();
if (client.vertexAI()) {
- System.out.println("Using Vertex AI");
+ System.out.println("Vertex AI API is not supported for this example.");
+ System.exit(0);
} else {
System.out.println("Using Gemini Developer API");
}
diff --git a/examples/src/main/java/com/google/genai/examples/FileSearchStores.java b/examples/src/main/java/com/google/genai/examples/FileSearchStores.java
new file mode 100644
index 00000000000..5a6a7b24e6f
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/FileSearchStores.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.FileSearchStores"
+ * -Dexec.args="path/to/file"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.errors.GenAiIOException;
+import com.google.genai.types.Document;
+import com.google.genai.types.File;
+import com.google.genai.types.FileSearchStore;
+import com.google.genai.types.ImportFileOperation;
+import com.google.genai.types.ListFileSearchStoresConfig;
+import com.google.genai.types.UploadFileConfig;
+import com.google.genai.types.UploadToFileSearchStoreOperation;
+
+/**
+ * An example of how to use the FileSearchStores module to upload, retrieve, and delete file search
+ * stores.
+ */
+public final class FileSearchStores {
+ public static void main(String[] args) {
+ final String filePath;
+ if (args.length != 0) {
+ filePath = args[0];
+ } else {
+ filePath = Constants.UPLOAD_FILE_PATH;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Vertex AI API is not supported for this example.");
+ System.exit(0);
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ try {
+ FileSearchStore fileSearchStore = client.fileSearchStores.create(null);
+ System.out.println("Created file store: " + fileSearchStore.name().get());
+
+ // Get the uploaded file search store.
+ FileSearchStore retrievedFileStore =
+ client.fileSearchStores.get(fileSearchStore.name().get(), null);
+ System.out.println("Retrieved file store: " + retrievedFileStore.name().get());
+
+ // List all file stores.
+ System.out.println("List file stores: ");
+ for (FileSearchStore f :
+ client.fileSearchStores.list(ListFileSearchStoresConfig.builder().pageSize(10).build())) {
+ System.out.println(" File store name: " + f.name().get());
+ }
+
+ // Upload a file to the Files Service.
+ File file =
+ client.files.upload(filePath, UploadFileConfig.builder().mimeType("text/plain").build());
+ System.out.println("Uploaded file: " + file.name().get());
+
+ // Import the uploaded file to the file search store.
+ ImportFileOperation importOperation =
+ client.fileSearchStores.importFile(fileSearchStore.name().get(), file.name().get(), null);
+ System.out.println("Import file operation: " + importOperation.name().get());
+ while (importOperation.done().filter(Boolean::booleanValue).isEmpty()) {
+ try {
+ Thread.sleep(5000); // Sleep for 5 seconds.
+ importOperation = client.operations.get(importOperation, null);
+ System.out.println("Waiting for import operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ // Upload a file to the file search store.
+ UploadToFileSearchStoreOperation uploadOperation =
+ client.fileSearchStores.uploadToFileSearchStore(
+ fileSearchStore.name().get(), filePath, null);
+ System.out.println("Upload to file search store operation: " + uploadOperation.name().get());
+ while (uploadOperation.done().filter(Boolean::booleanValue).isEmpty()) {
+ try {
+ Thread.sleep(5000); // Sleep for 5 seconds.
+ uploadOperation = client.operations.get(uploadOperation, null);
+ System.out.println("Waiting for upload operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ String documentName = uploadOperation.response().get().documentName().get();
+ System.out.println("Uploaded document: " + documentName);
+
+ // Get document
+ Document retrievedDocument = client.fileSearchStores.documents.get(documentName, null);
+ System.out.println("Retrieved document: " + retrievedDocument.name().get());
+
+ // List documents
+ System.out.println("List documents: ");
+ for (Document d :
+ client.fileSearchStores.documents.list(fileSearchStore.name().get(), null)) {
+ System.out.println(" Document name: " + d.name().get());
+ }
+
+ // Delete the imported document
+ client.fileSearchStores.documents.delete(documentName, null);
+ System.out.println("Deleted document: " + documentName);
+
+ // Delete the file search store
+ client.fileSearchStores.delete(fileSearchStore.name().get(), null);
+ System.out.println("Deleted file: " + fileSearchStore.name().get());
+ } catch (GenAiIOException e) {
+ System.out.println("An error occurred while uploading the file: " + e.getMessage());
+ }
+ }
+
+ private FileSearchStores() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/FileSearchStoresAsync.java b/examples/src/main/java/com/google/genai/examples/FileSearchStoresAsync.java
new file mode 100644
index 00000000000..2d2d048dedf
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/FileSearchStoresAsync.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.FileSearchStoresAsync"
+ * -Dexec.args="path/to/file"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.File;
+import com.google.genai.types.FileSearchStore;
+import com.google.genai.types.ListFileSearchStoresConfig;
+import com.google.genai.types.Operation;
+import com.google.genai.types.UploadFileConfig;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An example of how to use the FileSearchStores module to upload, retrieve, and delete file search
+ * stores.
+ */
+public final class FileSearchStoresAsync {
+
+ private static > CompletableFuture awaitOperationComplete(
+ Client client, T operation) {
+ if (operation.done().orElse(false)) {
+ System.out.println("Operation " + operation.name().get() + " completed.");
+ return CompletableFuture.completedFuture(operation);
+ }
+
+ System.out.println("Waiting for operation to complete...");
+ return CompletableFuture.supplyAsync(
+ () -> operation, CompletableFuture.delayedExecutor(5, TimeUnit.SECONDS))
+ .thenApply(
+ op -> {
+ try {
+ return client.async.operations.get(op, null).get();
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException("Failed to get operation status", e);
+ }
+ })
+ .thenCompose(newOp -> awaitOperationComplete(client, (T) newOp));
+ }
+
+ public static void main(String[] args) throws Exception {
+ String filePath = args.length > 0 ? args[0] : Constants.UPLOAD_FILE_PATH;
+
+ try (Client client = new Client()) {
+ if (client.vertexAI()) {
+ System.out.println("Vertex AI API is not supported for this example.");
+ return;
+ }
+ System.out.println("Using Gemini Developer API");
+
+ // Create store
+ CompletableFuture finalFuture =
+ client
+ .async
+ .fileSearchStores
+ .create(null)
+ .thenCompose(
+ store -> {
+ System.out.println("Created file store: " + store.name().get());
+
+ // Get store
+ return client
+ .async
+ .fileSearchStores
+ .get(store.name().get(), null)
+ .thenAccept(
+ retrievedStore ->
+ System.out.println(
+ "Retrieved file store: " + retrievedStore.name().get() + ")"))
+
+ // List stores.
+ .thenCompose(
+ v ->
+ client.async.fileSearchStores.list(
+ ListFileSearchStoresConfig.builder().pageSize(10).build()))
+ .thenCompose(
+ pager -> {
+ System.out.println("List file stores: ");
+ return pager.forEach(
+ item ->
+ System.out.println(
+ " File store name: " + item.name().get()));
+ })
+ .thenApply(v -> store);
+ })
+ .thenCompose(
+ store -> {
+ // Upload File
+ return client
+ .async
+ .files
+ .upload(filePath, UploadFileConfig.builder().mimeType("text/plain").build())
+ .thenApply(
+ file -> {
+ System.out.println("Uploaded file: " + file.name().get());
+ return new Object[] {store, file};
+ });
+ })
+ .thenCompose(
+ objects -> {
+ FileSearchStore store = (FileSearchStore) objects[0];
+ File file = (File) objects[1];
+
+ // Import File
+ return client
+ .async
+ .fileSearchStores
+ .importFile(store.name().get(), file.name().get(), null)
+ .thenCompose(operation -> awaitOperationComplete(client, operation))
+ .thenApply(
+ completedOp -> {
+ System.out.println("Import File: LRO Completed.");
+ return store;
+ });
+ })
+
+ // Direct upload file to the store
+ .thenCompose(
+ store -> {
+ return client
+ .async
+ .fileSearchStores
+ .uploadToFileSearchStore(store.name().get(), filePath, null)
+ .thenCompose(operation -> awaitOperationComplete(client, operation))
+ .thenApply(
+ completedOp -> {
+ String docName = completedOp.response().get().documentName().get();
+ System.out.println("Direct Upload: Completed document " + docName);
+ return new Object[] {store, docName};
+ });
+ })
+ .thenCompose(
+ objects -> {
+ FileSearchStore store = (FileSearchStore) objects[0];
+ String docName = (String) objects[1];
+ return client
+ .async
+ .fileSearchStores
+ .documents
+ .get(docName, null)
+ .thenAccept(
+ doc ->
+ System.out.println(
+ "Get Document: Success (" + doc.name().get() + ")"))
+
+ // List documents
+ .thenCompose(
+ v ->
+ client.async.fileSearchStores.documents.list(
+ store.name().get(), null))
+ .thenCompose(
+ pager -> {
+ System.out.println("List all document names: ");
+ return pager.forEach(
+ item ->
+ System.out.println(" document name: " + item.name().get()));
+ })
+
+ // Delete document
+ .thenCompose(
+ v -> client.async.fileSearchStores.documents.delete(docName, null))
+ .thenRun(() -> System.out.println("Delete Document: Success."))
+
+ // Delete store
+ .thenCompose(
+ v -> client.async.fileSearchStores.delete(store.name().get(), null))
+ .thenAccept(v -> System.out.println("Delete Store: Success."));
+ });
+ finalFuture.get();
+ }
+ System.out.println("Async execution for file search stores completed successfully.");
+ }
+
+ private FileSearchStoresAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContent.java b/examples/src/main/java/com/google/genai/examples/GenerateContent.java
index 6bc8f357c87..f89c986fd14 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContent.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContent.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content. */
public final class GenerateContent {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,6 +74,13 @@ public static void main(String[] args) {
// Gets the text string from the response by the quick accessor method `text()`.
System.out.println("Unary response: " + response.text());
+
+ // Gets the http headers from the response.
+ response
+ .sdkHttpResponse()
+ .ifPresent(
+ httpResponse ->
+ System.out.println("Response headers: " + httpResponse.headers().orElse(null)));
}
private GenerateContent() {}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java
index 3915a3cf4d2..655c99ce3f8 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java
@@ -47,9 +47,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content asynchronously. */
public final class GenerateContentAsync {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -74,7 +76,15 @@ public static void main(String[] args) {
responseFuture
.thenAccept(
response -> {
+ // Gets the text string from the response
System.out.println("Async response: " + response.text());
+ // Gets the http headers from the response.
+ response
+ .sdkHttpResponse()
+ .ifPresent(
+ httpResponse ->
+ System.out.println(
+ "Response headers: " + httpResponse.headers().orElse(null)));
})
.join();
}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java b/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java
index da242a2924a..421b4202594 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java
@@ -47,9 +47,11 @@
/** An example of using the Unified GenAI Java SDK to generate stream of content. */
public final class GenerateContentStream {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -73,7 +75,14 @@ public static void main(String[] args) {
System.out.println("Streaming response: ");
for (GenerateContentResponse res : responseStream) {
+ // Gets the text string from the response by the quick accessor method `text()`.
System.out.print(res.text());
+
+ // Gets the http headers from the response.
+ res.sdkHttpResponse()
+ .ifPresent(
+ httpResponse ->
+ System.out.println("Response headers: " + httpResponse.headers().orElse(null)));
}
// To save resources and avoid connection leaks, it is recommended to close the response
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentStreamingFunctionCall.java b/examples/src/main/java/com/google/genai/examples/GenerateContentStreamingFunctionCall.java
new file mode 100644
index 00000000000..58da1ae1b6d
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentStreamingFunctionCall.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentStreamingFunctionCall"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.ResponseStream;
+import com.google.genai.types.FunctionCallingConfig;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Tool;
+import com.google.genai.types.ToolConfig;
+import java.lang.reflect.Method;
+
+/** An example of using the Unified Gen AI Java SDK to generate content. */
+public final class GenerateContentStreamingFunctionCall {
+ /** A callable function to get the current weather. */
+ public static String getCurrentWeather(String location) {
+ return "The weather in " + location + " is " + "very nice.";
+ }
+
+ public static void main(String[] args) throws NoSuchMethodException {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_3_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API does not support streaming function calling.");
+ return;
+ }
+
+ // Add the methods as callable functions to the tool.
+ Method method1 =
+ GenerateContentStreamingFunctionCall.class.getDeclaredMethod(
+ "getCurrentWeather", String.class);
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ .tools(Tool.builder().functions(method1))
+ .toolConfig(
+ ToolConfig.builder()
+ .functionCallingConfig(
+ FunctionCallingConfig.builder().streamFunctionCallArguments(true)))
+ .build();
+
+ ResponseStream responseStream =
+ client.models.generateContentStream(
+ modelId, "What is the weather in San Francisco?", config);
+
+ System.out.println("Streaming response: ");
+ for (GenerateContentResponse res : responseStream) {
+ // Gets the function calls from the response by the quick accessor method `text()`.
+ System.out.print(res.functionCalls());
+
+ // Gets the http headers from the response.
+ res.sdkHttpResponse()
+ .ifPresent(
+ httpResponse ->
+ System.out.println("Response headers: " + httpResponse.headers().orElse(null)));
+ }
+
+ // To save resources and avoid connection leaks, it is recommended to close the response
+ // stream after consumption (or using try block to get the response stream).
+ responseStream.close();
+ }
+
+ private GenerateContentStreamingFunctionCall() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithClientOptions.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithClientOptions.java
new file mode 100644
index 00000000000..95e98aeae6f
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithClientOptions.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.GenerateContentWithClientOptions"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.ClientOptions;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.ProxyOptions;
+import com.google.genai.types.ProxyType;
+
+/** An example of setting client options in a GenerateContent request. */
+public final class GenerateContentWithClientOptions {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Set the client options when creating the client. This applies to all requests made through
+ // this client.
+ ClientOptions clientOptions =
+ ClientOptions.builder()
+ .proxyOptions(ProxyOptions.builder().type(ProxyType.Known.DIRECT))
+ .maxConnections(10)
+ .maxConnectionsPerHost(5)
+ .build();
+
+ Client client = Client.builder().clientOptions(clientOptions).build();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "Tell me the history of LLM in 100 words", null);
+
+ System.out.println("Response: " + response.text());
+ }
+
+ private GenerateContentWithClientOptions() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java
index 38833c0de22..770bbeb5751 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java
@@ -57,9 +57,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content with extra configs. */
public final class GenerateContentWithConfigs {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java
index dc6d4b3e7d4..1efca4069de 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java
@@ -47,6 +47,8 @@
import com.google.genai.types.GenerateContentResponse;
import com.google.genai.types.Tool;
import java.lang.reflect.Method;
+import java.util.List;
+import java.util.ArrayList;
/** An example of using the Unified Gen AI Java SDK to generate content with function calling. */
public final class GenerateContentWithFunctionCall {
@@ -56,14 +58,24 @@ public static String getCurrentWeather(String location, String unit) {
}
/** A callable function to divide two integers. */
- public static Integer divideTwoIntegers(Integer numerator, Integer denominator) {
+ public static Integer divideTwoIntegers(int numerator, int denominator) {
return numerator / denominator;
}
+  public static Integer sumInts(List<Integer> items) {
+ int sum = 0;
+ for (Integer item : items) {
+ sum += item;
+ }
+ return sum;
+ }
+
public static void main(String[] args) throws NoSuchMethodException {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -82,20 +94,28 @@ public static void main(String[] args) throws NoSuchMethodException {
System.out.println("Using Gemini Developer API");
}
+ // Load the two methods as reflected Method objects so that they can be automatically executed
+ // on the client side.
Method method1 =
GenerateContentWithFunctionCall.class.getMethod(
"getCurrentWeather", String.class, String.class);
Method method2 =
- GenerateContentWithFunctionCall.class.getMethod(
- "divideTwoIntegers", Integer.class, Integer.class);
+ GenerateContentWithFunctionCall.class.getMethod("divideTwoIntegers", int.class, int.class);
+
+ Method method3 = GenerateContentWithFunctionCall.class.getMethod("sumInts", List.class);
// Add the two methods as callable functions to the list of tools.
GenerateContentConfig config =
- GenerateContentConfig.builder().tools(Tool.builder().functions(method1, method2)).build();
+ GenerateContentConfig.builder()
+ .tools(Tool.builder().functions(method1, method2, method3))
+ .build();
GenerateContentResponse response =
client.models.generateContent(
- modelId, "What is the weather in Vancouver? And can you divide 10 by 0?", config);
+ modelId,
+ "What is the weather in Vancouver? And can you divide 10 by 0? And can you sum the"
+ + " integers 1, 2, 3, 4, and 5?",
+ config);
System.out.println("The response is: " + response.text());
System.out.println(
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallAsync.java
new file mode 100644
index 00000000000..66f87fcfae2
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallAsync.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentWithFunctionCallAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Tool;
+import java.lang.reflect.Method;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to generate content with (automatic) function
+ * calling asynchronously.
+ */
+public final class GenerateContentWithFunctionCallAsync {
+ /** A callable function to get the weather. */
+ public static String getCurrentWeather(String location, String unit) {
+ return "The weather in " + location + " is " + "very nice.";
+ }
+
+ /** A callable function to divide two integers. */
+ public static Integer divideTwoIntegers(int numerator, int denominator) {
+ return numerator / denominator;
+ }
+
+ public static void main(String[] args) throws NoSuchMethodException, InterruptedException {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Load the two methods as reflected Method objects so that they can be automatically executed
+ // on the client side.
+ Method method1 =
+ GenerateContentWithFunctionCall.class.getMethod(
+ "getCurrentWeather", String.class, String.class);
+ Method method2 =
+ GenerateContentWithFunctionCall.class.getMethod("divideTwoIntegers", int.class, int.class);
+
+ // Add the two methods as callable functions to the list of tools.
+ GenerateContentConfig config =
+ GenerateContentConfig.builder().tools(Tool.builder().functions(method1, method2)).build();
+
+ // --- Asynchronous Call ---
+    CompletableFuture<GenerateContentResponse> future =
+ client.async.models.generateContent(
+ modelId, "What is the weather in Vancouver? And can you divide 10 by 0?", config);
+
+ try {
+ GenerateContentResponse response = future.get();
+
+ System.out.println("The response is: " + response.text());
+ System.out.println(
+ "The automatic function calling history is: "
+ + response.automaticFunctionCallingHistory().get());
+
+ } catch (ExecutionException e) {
+ // This shows how to handle errors in the async call.
+ System.err.println("Error during execution: " + e.getCause());
+ }
+ }
+
+ private GenerateContentWithFunctionCallAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java
new file mode 100644
index 00000000000..6dd91f1c7bf
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.GenerateContentWithFunctionCallJson"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.genai.Client;
+import com.google.genai.types.FunctionDeclaration;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Tool;
+
+/** An example of using the Unified Gen AI Java SDK to generate content with function calling. */
+public final class GenerateContentWithFunctionCallJson {
+  /** Runs the example: declares a function via JSON-schema maps and generates content. */
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Define the schema for the function declaration, in Json format.
+ ImmutableMap parametersSchema =
+ ImmutableMap.of(
+ "type", "object",
+ "properties", ImmutableMap.of("location", ImmutableMap.of("type", "string")),
+ "required", ImmutableList.of("location"));
+
+ ImmutableMap responseSchema =
+ ImmutableMap.of(
+ "type", "object",
+ "properties", ImmutableMap.of("weather", ImmutableMap.of("type", "string")),
+ "required", ImmutableList.of("weather"));
+
+ // Define the tool with the function declaration.
+ Tool toolWithFunctionDeclarations =
+ Tool.builder()
+ .functionDeclarations(
+ FunctionDeclaration.builder()
+ .name("get_weather")
+ .description("Returns the weather in a given location.")
+ .parametersJsonSchema(parametersSchema)
+ .responseJsonSchema(responseSchema)
+ .build())
+ .build();
+
+ // Add the tool to the GenerateContentConfig.
+ GenerateContentConfig config =
+ GenerateContentConfig.builder().tools(toolWithFunctionDeclarations).build();
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "What is the weather in Vancouver?", config);
+
+ System.out.println("The response is: " + response.functionCalls());
+ }
+
+ private GenerateContentWithFunctionCallJson() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJsonString.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJsonString.java
new file mode 100644
index 00000000000..774fc540039
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJsonString.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.GenerateContentWithFunctionCallJsonString"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
+import com.google.genai.types.FunctionDeclaration;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Tool;
+
+/** An example of using the Unified Gen AI Java SDK to generate content with function calling. */
+public final class GenerateContentWithFunctionCallJsonString {
+  /** Runs the example: declares a function via JSON-schema strings and generates content. */
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Define the schema for the function declaration, in Json format. Note if you have java 15 or
+ // above, you can use the following string block instead:
+ // String parametersSchemaString =
+ // """{
+ // "type": "object",
+ // "properties": {
+ // "location": {
+ // "type": "string"
+ // }
+ // },
+ // "required": [
+ // "location"
+ // ]
+ // }""";
+ // String responseSchemaString =
+ // """{
+ // "type": "object",
+ // "properties": {
+ // "weather": {
+ // "type": "string"
+ // }
+ // },
+ // "required": [
+ // "weather"
+ // ]
+ // }""";
+ String parametersSchemaString =
+ "{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"}},\"required\":[\"location\"]}";
+ String responseSchemaString =
+ "{\"type\":\"object\",\"properties\":{\"weather\":{\"type\":\"string\"}},\"required\":[\"weather\"]}";
+
+ // Define the tool with the function declaration.
+ Tool toolWithFunctionDeclarations =
+ Tool.builder()
+ .functionDeclarations(
+ FunctionDeclaration.builder()
+ .name("get_weather")
+ .description("Returns the weather in a given location.")
+ .parametersJsonSchema(JsonSerializable.stringToJsonNode(parametersSchemaString))
+ .responseJsonSchema(JsonSerializable.stringToJsonNode(responseSchemaString))
+ .build())
+ .build();
+
+ // Add the tool to the GenerateContentConfig.
+ GenerateContentConfig config =
+ GenerateContentConfig.builder().tools(toolWithFunctionDeclarations).build();
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "What is the weather in Vancouver?", config);
+
+ System.out.println("The response is: " + response.functionCalls());
+ }
+
+ private GenerateContentWithFunctionCallJsonString() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithHttpOptions.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithHttpOptions.java
new file mode 100644
index 00000000000..a45e5334e59
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithHttpOptions.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.GenerateContentWithHttpOptions"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.HttpOptions;
+import com.google.genai.types.HttpRetryOptions;
+
+/** An example of setting http options in a GenerateContent request. */
+public final class GenerateContentWithHttpOptions {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Set the client level http options when creating the client. All the API requests will share
+ // the same http options.
+ HttpOptions httpOptions =
+ HttpOptions.builder()
+ .apiVersion("v1")
+ .timeout(10000)
+ .retryOptions(HttpRetryOptions.builder().attempts(3).httpStatusCodes(408, 429, 504))
+ .build();
+
+ Client client = Client.builder().httpOptions(httpOptions).build();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "Tell me the history of LLM in 100 words", null);
+
+ System.out.println("Response: " + response.text());
+ }
+
+ private GenerateContentWithHttpOptions() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java
index 997737cb60d..316dbb1ccbd 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java
@@ -50,9 +50,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content with image input. */
public final class GenerateContentWithImageInput {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -68,9 +70,10 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- throw new IllegalArgumentException(
+ System.out.println(
"This example is not supported for Gemini Developer API since the image uri from GCS is"
+ " only supported in Vertex AI.");
+ System.exit(0);
}
Content content =
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java
new file mode 100644
index 00000000000..b6ec9707d3f
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentWithResponseJsonSchema"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+
+/**
+ * GenerateContentWithResponseJsonSchema generates a content and returns a json object by passing a
+ * schema.
+ */
+public final class GenerateContentWithResponseJsonSchema {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ ImmutableMap schema = ImmutableMap.of(
+ "type", "object",
+ "properties", ImmutableMap.of(
+ "recipe_name", ImmutableMap.of("type", "string"),
+ "ingredients", ImmutableMap.of(
+ "type", "array",
+ "items", ImmutableMap.of("type", "string")
+ )
+ ),
+ "required", ImmutableList.of("recipe_name", "ingredients")
+ );
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ .responseMimeType("application/json")
+ .candidateCount(1)
+ .responseJsonSchema(schema)
+ .build();
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "List a few popular cookie recipes.", config);
+
+ System.out.println("Response: " + response.text());
+ }
+
+ private GenerateContentWithResponseJsonSchema() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchemaString.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchemaString.java
new file mode 100644
index 00000000000..8cb4e6a4d5b
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchemaString.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentWithResponseJsonSchemaString"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+
+/**
+ * GenerateContentWithResponseJsonSchema generates a content and returns a json object by passing a
+ * schema.
+ */
+public final class GenerateContentWithResponseJsonSchemaString {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+ // Note if you have java 15 or above, you can use the following string block instead:
+ // String schema = """{
+ // "type": "object",
+ // "properties": {
+ // "recipe_name": {
+ // "type": "string"
+ // },
+ // "ingredients": {
+ // "type": "array",
+ // "items": {
+ // "type": "string"
+ // }
+ // }
+ // },
+ // "required": [
+ // "recipe_name",
+ // "ingredients"
+ // ]
+ // }""";
+ String schema =
+ "{\"type\":\"object\",\"properties\":{\"recipe_name\":{\"type\":\"string\"},\"ingredients\":{\"type\":\"array\",\"items\":{\"type\":\"string\"}}},\"required\":[\"recipe_name\",\"ingredients\"]}";
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ .responseMimeType("application/json")
+ .candidateCount(1)
+ .responseJsonSchema(JsonSerializable.stringToJsonNode(schema))
+ .build();
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "List a few popular cookie recipes.", config);
+
+ System.out.println("Response: " + response.text());
+ }
+
+ private GenerateContentWithResponseJsonSchemaString() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java
index c6422966202..55c5e1d08c3 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content with response modality. */
public final class GenerateContentWithResponseModality {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-preview-image-generation";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_IMAGE_GENERATION_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java
index ce9e53e41e4..ea44ba2c6d2 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java
@@ -55,9 +55,11 @@
*/
public final class GenerateContentWithResponseSchema {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateImages.java b/examples/src/main/java/com/google/genai/examples/GenerateImages.java
index c146985827e..d6704632746 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateImages.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateImages.java
@@ -48,9 +48,11 @@
/** An example of using the Unified Gen AI Java SDK to generate images. */
public final class GenerateImages {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-002";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_GENERATE_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -80,10 +82,11 @@ public static void main(String[] args) {
client.models.generateImages(
modelId, "Robot holding a red skateboard", generateImagesConfig);
- System.out.println(
- "Generated " + generatedImagesResponse.generatedImages().get().size() + " images.");
-
- Image generatedImage = generatedImagesResponse.generatedImages().get().get(0).image().get();
+ if (generatedImagesResponse.images().isEmpty()) {
+ System.out.println("Unable to generate images.");
+ }
+ System.out.println("Generated " + generatedImagesResponse.images().size() + " images.");
+ Image generatedImage = generatedImagesResponse.images().get(0);
// Do something with the image.
System.out.println(
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java
index b9ceb3bcc43..2556b5e472a 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to generate images asynchronously. */
public final class GenerateImagesAsync {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-002";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_GENERATE_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideos.java b/examples/src/main/java/com/google/genai/examples/GenerateVideos.java
index 690ee862abd..b515c3dfc29 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateVideos.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideos.java
@@ -41,17 +41,21 @@
package com.google.genai.examples;
import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
import com.google.genai.errors.GenAiIOException;
import com.google.genai.types.GenerateVideosConfig;
import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
import com.google.genai.types.Video;
/** An example of using the Unified Gen AI Java SDK to generate videos. */
public final class GenerateVideos {
public static void main(String[] args) {
- String modelId = "veo-2.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.VEO_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -70,6 +74,11 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
+ // Optional: If the default 20MB limit is not enough for the generated video response, you can
+ // increase the limit via system property `genai.json.maxReadLength` or via this static method
+ // `JsonSerializable.setMaxReadLength`.
+ JsonSerializable.setMaxReadLength(100_000_000);
+
GenerateVideosConfig.Builder generateVideosConfigBuilder =
GenerateVideosConfig.builder().numberOfVideos(1);
@@ -77,17 +86,19 @@ public static void main(String[] args) {
generateVideosConfigBuilder.outputGcsUri("gs://genai-sdk-tests/tmp/videos");
}
GenerateVideosConfig generateVideosConfig = generateVideosConfigBuilder.build();
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .build();
GenerateVideosOperation generateVideosOperation =
- client.models.generateVideos(
- modelId, "A neon hologram of a cat driving at top speed", null, generateVideosConfig);
+ client.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
// GenerateVideosOperation.done() is empty if the operation is not done.
while (!generateVideosOperation.done().filter(Boolean::booleanValue).isPresent()) {
try {
Thread.sleep(10000); // Sleep for 10 seconds.
- generateVideosOperation =
- client.operations.getVideosOperation(generateVideosOperation, null);
+ generateVideosOperation = client.operations.get(generateVideosOperation, null);
System.out.println("Waiting for operation to complete...");
} catch (InterruptedException e) {
System.out.println("Thread was interrupted while sleeping.");
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java
index 5f97af5c0d3..55b99cfa087 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java
@@ -41,8 +41,10 @@
package com.google.genai.examples;
import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
import com.google.genai.types.GenerateVideosConfig;
import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
import com.google.genai.types.Video;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
@@ -50,9 +52,11 @@
/** An example of using the Unified Gen AI Java SDK to generate images asynchronously. */
public final class GenerateVideosAsync {
public static void main(String[] args) {
- String modelId = "veo-2.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.VEO_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -71,6 +75,11 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
+ // Optional: If the default 20MB limit is not enough for the generated video response, you can
+ // increase the limit via system property `genai.json.maxReadLength` or via this static method
+ // `JsonSerializable.setMaxReadLength`.
+ JsonSerializable.setMaxReadLength(100_000_000);
+
GenerateVideosConfig.Builder generateVideosConfigBuilder =
GenerateVideosConfig.builder().numberOfVideos(1);
@@ -78,10 +87,13 @@ public static void main(String[] args) {
generateVideosConfigBuilder.outputGcsUri("gs://genai-sdk-tests/tmp/videos");
}
GenerateVideosConfig generateVideosConfig = generateVideosConfigBuilder.build();
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .build();
CompletableFuture generateVideosOperationFuture =
- client.async.models.generateVideos(
- modelId, "A neon hologram of a cat driving at top speed", null, generateVideosConfig);
+ client.async.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
generateVideosOperationFuture
.thenAccept(
@@ -92,7 +104,7 @@ public static void main(String[] args) {
try {
Thread.sleep(10000); // Sleep for 10 seconds.
try {
- operation = client.async.operations.getVideosOperation(operation, null).get();
+ operation = client.async.operations.get(operation, null).get();
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
@@ -109,7 +121,7 @@ public static void main(String[] args) {
Video generatedVideo =
operation.response().get().generatedVideos().get().get(0).video().get();
- // Do something with the video.
+ System.out.println("Video URL: " + generatedVideo.uri().get());
})
.join();
}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideosEditOutpaint.java b/examples/src/main/java/com/google/genai/examples/GenerateVideosEditOutpaint.java
new file mode 100644
index 00000000000..ab46adec263
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideosEditOutpaint.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateVideosEditOutpaint"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
+import com.google.genai.types.Image;
+import com.google.genai.types.Video;
+import com.google.genai.types.VideoGenerationMask;
+import com.google.genai.types.VideoGenerationMaskMode;
+
+/** An example of using the Unified Gen AI Java SDK to edit a video with outpaint mode. */
+public final class GenerateVideosEditOutpaint {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = "veo-2.0-generate-preview"; // Only supported on preview model currently.
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ // Optional: If the default 20MB limit is not enough for the generated video response, you can
+ // increase the limit via system property `genai.json.maxReadLength` or via this static method
+ // `JsonSerializable.setMaxReadLength`.
+ JsonSerializable.setMaxReadLength(100_000_000);
+
+ VideoGenerationMask videoGenerationMask =
+ VideoGenerationMask.builder()
+ .image(
+ Image.builder()
+ .gcsUri("gs://genai-sdk-tests/inputs/videos/video_outpaint_mask.png")
+ .mimeType("image/png")
+ .build())
+ .maskMode(VideoGenerationMaskMode.Known.OUTPAINT)
+ .build();
+
+ GenerateVideosConfig generateVideosConfig =
+ GenerateVideosConfig.builder()
+ .numberOfVideos(1)
+ .outputGcsUri("gs://genai-sdk-tests/tmp/videos")
+ .aspectRatio("16:9")
+ .mask(videoGenerationMask)
+ .build();
+
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .video(
+ Video.builder()
+ .uri("gs://genai-sdk-tests/inputs/videos/editing_demo.mp4")
+ .mimeType("video/mp4")
+ .build())
+ .build();
+
+ GenerateVideosOperation generateVideosOperation =
+ client.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
+
+ // GenerateVideosOperation.done() is empty if the operation is not done.
+ while (!generateVideosOperation.done().filter(Boolean::booleanValue).isPresent()) {
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ generateVideosOperation =
+ client.operations.getVideosOperation(generateVideosOperation, null);
+ System.out.println("Waiting for operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+ System.out.println(
+ "Generated "
+ + generateVideosOperation.response().get().generatedVideos().get().size()
+ + " video(s).");
+
+ Video generatedVideo =
+ generateVideosOperation.response().get().generatedVideos().get().get(0).video().get();
+ }
+
+ private GenerateVideosEditOutpaint() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideosExtension.java b/examples/src/main/java/com/google/genai/examples/GenerateVideosExtension.java
new file mode 100644
index 00000000000..869e1d53ad4
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideosExtension.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateVideosExtension" -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
+import com.google.genai.errors.GenAiIOException;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
+import com.google.genai.types.Video;
+
+/** An example of using the Unified Gen AI Java SDK to extend a video. */
+public final class GenerateVideosExtension {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.VEO_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Sample is only available for Gemini Developer API.");
+ return;
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Optional: If the default 20MB limit is not enough for the generated video response, you can
+ // increase the limit via system property `genai.json.maxReadLength` or via this static method
+ // `JsonSerializable.setMaxReadLength`.
+ JsonSerializable.setMaxReadLength(100_000_000);
+
+ // Generate first video.
+ GenerateVideosConfig generateVideosConfig =
+ GenerateVideosConfig.builder().numberOfVideos(1).build();
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .build();
+
+ GenerateVideosOperation generateVideosOperation1 =
+ client.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
+
+ // GenerateVideosOperation.done() is empty if the operation is not done.
+ while (!generateVideosOperation1.done().filter(Boolean::booleanValue).isPresent()) {
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ generateVideosOperation1 =
+ client.operations.getVideosOperation(generateVideosOperation1, null);
+ System.out.println("Waiting for operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+ System.out.println(
+ "Generated "
+ + generateVideosOperation1.response().get().generatedVideos().get().size()
+ + " video(s).");
+
+ Video generatedVideo1 =
+ generateVideosOperation1.response().get().generatedVideos().get().get(0).video().get();
+
+ if (!client.vertexAI()) {
+ try {
+ client.files.download(generatedVideo1, "video.mp4", null);
+ System.out.println("Downloaded video to video.mp4");
+ } catch (GenAiIOException e) {
+ System.out.println("An error occurred while downloading the video: " + e.getMessage());
+ }
+ }
+
+ // Extend the generated video.
+ GenerateVideosConfig generateVideosConfig2 =
+ GenerateVideosConfig.builder().numberOfVideos(1).build();
+ GenerateVideosSource generateVideosSource2 =
+ GenerateVideosSource.builder().prompt("Rain").video(generatedVideo1).build();
+
+ GenerateVideosOperation generateVideosOperation2 =
+ client.models.generateVideos(modelId, generateVideosSource2, generateVideosConfig2);
+
+ // GenerateVideosOperation.done() is empty if the operation is not done.
+ while (!generateVideosOperation2.done().filter(Boolean::booleanValue).isPresent()) {
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ generateVideosOperation2 =
+ client.operations.getVideosOperation(generateVideosOperation2, null);
+ System.out.println("Waiting for operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+ System.out.println(
+ "Generated "
+ + generateVideosOperation2.response().get().generatedVideos().get().size()
+ + " video(s).");
+
+ Video generatedVideo2 =
+ generateVideosOperation2.response().get().generatedVideos().get().get(0).video().get();
+
+ if (!client.vertexAI()) {
+ try {
+ client.files.download(generatedVideo2, "video.mp4", null);
+ System.out.println("Downloaded extended video to video.mp4");
+ } catch (GenAiIOException e) {
+ System.out.println("An error occurred while downloading the video: " + e.getMessage());
+ }
+ }
+ }
+
+ private GenerateVideosExtension() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/HttpOptionsExtraBody.java b/examples/src/main/java/com/google/genai/examples/HttpOptionsExtraBody.java
new file mode 100644
index 00000000000..c4fecf6ba01
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/HttpOptionsExtraBody.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.HttpOptionsExtraBody"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.HttpOptions;
+
+/**
+ * An example of using HttpOptions extraBody to inject additional parameters into the HTTP request body.
+ */
+public final class HttpOptionsExtraBody {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API.
+ // It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used
+ // by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as
+ // well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or
+ // Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not
+ // available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ GenerateContentResponse response =
+ client.models.generateContent(
+ modelId,
+ "What is your name?",
+ GenerateContentConfig.builder()
+ .httpOptions(
+ HttpOptions.builder()
+ .extraBody(
+ ImmutableMap.of(
+ "systemInstruction",
+ ImmutableMap.of(
+ "parts",
+ ImmutableList.of(
+ ImmutableMap.of("text", "You are a chatbot.")))))
+ .build())
+ .build());
+
+ System.out.println(
+ "GenerateContent prompt token count: " + response.usageMetadata().get().promptTokenCount());
+ }
+
+ private HttpOptionsExtraBody() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java b/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java
index f54f9ef7390..e1a6c82e0a9 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java
@@ -50,12 +50,16 @@
import com.google.genai.AsyncSession;
import com.google.genai.Client;
import com.google.genai.types.Blob;
+import com.google.genai.types.AutomaticActivityDetection;
+import com.google.genai.types.EndSensitivity;
import com.google.genai.types.LiveConnectConfig;
import com.google.genai.types.LiveSendRealtimeInputParameters;
import com.google.genai.types.LiveServerMessage;
import com.google.genai.types.Modality;
+import com.google.genai.types.RealtimeInputConfig;
import com.google.genai.types.PrebuiltVoiceConfig;
import com.google.genai.types.SpeechConfig;
+import com.google.genai.types.StartSensitivity;
import com.google.genai.types.VoiceConfig;
import java.util.Collection;
import java.util.Optional;
@@ -86,7 +90,6 @@ public final class LiveAudioConversationAsync {
// --------------------------
private static volatile boolean running = true;
- private static volatile boolean speakerPlaying = false;
private static TargetDataLine microphoneLine;
private static SourceDataLine speakerLine;
private static AsyncSession session;
@@ -113,8 +116,7 @@ private static void sendMicrophoneAudio() {
while (running && microphoneLine != null && microphoneLine.isOpen()) {
bytesRead = microphoneLine.read(buffer, 0, buffer.length);
- if (bytesRead > 0 && !speakerPlaying) {
- // Create a copy of the buffer with the actual bytes read
+ if (bytesRead > 0) {
byte[] audioChunk = new byte[bytesRead];
System.arraycopy(buffer, 0, audioChunk, 0, bytesRead);
@@ -153,14 +155,13 @@ public static void main(String[] args) throws LineUnavailableException {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-2.0-flash-live-001";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
// --- Audio Line Setup ---
@@ -180,6 +181,14 @@ public static void main(String[] args) throws LineUnavailableException {
.prebuiltVoiceConfig(
PrebuiltVoiceConfig.builder().voiceName(voiceName)))
.languageCode("en-US"))
+ .realtimeInputConfig(
+ RealtimeInputConfig.builder()
+ .automaticActivityDetection(
+ AutomaticActivityDetection.builder()
+ .startOfSpeechSensitivity(StartSensitivity.Known.START_SENSITIVITY_HIGH)
+ .endOfSpeechSensitivity(EndSensitivity.Known.END_SENSITIVITY_HIGH)
+ .prefixPaddingMs(5)
+ .silenceDurationMs(100)))
.build();
// --- Shutdown Hook for Cleanup ---
@@ -302,25 +311,35 @@ public static void handleAudioResponse(LiveServerMessage message) {
.serverContent()
.ifPresent(
content -> {
+ // Handle interruptions from Gemini.
+ if (content.interrupted().orElse(false)) {
+ speakerLine.flush();
+ return; // Skip processing the rest of this message's audio.
+ }
+
+ // Handle Model turn completion.
if (content.turnComplete().orElse(false)) {
- // When interrupted, Gemini sends a turn_complete.
- // Stop the speaker if the turn is complete.
- if (speakerLine != null && speakerLine.isOpen()) {
- speakerLine.flush();
- }
- } else {
- content.modelTurn().stream()
- .flatMap(modelTurn -> modelTurn.parts().stream())
- .flatMap(Collection::stream)
- .map(part -> part.inlineData().flatMap(Blob::data))
- .flatMap(Optional::stream)
- .forEach(
- audioBytes -> {
- if (speakerLine != null && speakerLine.isOpen()) {
- // Write audio data to the speaker
- speakerLine.write(audioBytes, 0, audioBytes.length);
- }
- });
+ // The turn is over, no more audio will be sent for this turn.
+ return;
+ }
+
+ // Process audio content for playback.
+ content.modelTurn().stream()
+ .flatMap(modelTurn -> modelTurn.parts().stream())
+ .flatMap(Collection::stream)
+ .map(part -> part.inlineData().flatMap(Blob::data))
+ .flatMap(Optional::stream)
+ .forEach(
+ audioBytes -> {
+ if (speakerLine != null && speakerLine.isOpen()) {
+ // Write audio data to the speaker
+ speakerLine.write(audioBytes, 0, audioBytes.length);
+ }
+ });
+
+ // If this is the last message of a generation, drain the buffer.
+ if (content.generationComplete().orElse(false)) {
+ speakerLine.drain();
}
});
}
diff --git a/examples/src/main/java/com/google/genai/examples/LiveEphemeralTokenAsync.java b/examples/src/main/java/com/google/genai/examples/LiveEphemeralTokenAsync.java
new file mode 100644
index 00000000000..b5c302e2cf2
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/LiveEphemeralTokenAsync.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.LiveEphemeralTokenAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.genai.AsyncSession;
+import com.google.genai.Client;
+import com.google.genai.types.AuthToken;
+import com.google.genai.types.Content;
+import com.google.genai.types.CreateAuthTokenConfig;
+import com.google.genai.types.HttpOptions;
+import com.google.genai.types.LiveConnectConfig;
+import com.google.genai.types.LiveConnectConstraints;
+import com.google.genai.types.LiveSendClientContentParameters;
+import com.google.genai.types.LiveServerContent;
+import com.google.genai.types.LiveServerMessage;
+import com.google.genai.types.Modality;
+import com.google.genai.types.Part;
+import java.util.concurrent.CompletableFuture;
+
+/** Example of using the live module to send and receive text messages asynchronously. */
+public final class LiveEphemeralTokenAsync {
+
+ public static void main(String[] args) {
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client =
+ Client.builder().httpOptions(HttpOptions.builder().apiVersion("v1alpha").build()).build();
+
+ if (client.vertexAI()) {
+ System.out.println("Vertex AI API is not supported for this example.");
+ System.exit(0);
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+ System.out.println("Creating auth token...");
+
+ // Create an auth token for the live session.
+ AuthToken authToken =
+ client.authTokens.create(
+ CreateAuthTokenConfig.builder()
+ .uses(2)
+ .liveConnectConstraints(
+ LiveConnectConstraints.builder()
+ .model(Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW)
+ .config(
+ LiveConnectConfig.builder()
+ .systemInstruction(
+ Content.fromParts(
+ Part.fromText(
+ "Answer questions like C-3PO from Star Wars would.")))
+ .responseModalities(Modality.Known.AUDIO)
+ .build())
+ .build())
+ .lockAdditionalFields(ImmutableList.of("topP"))
+ .build());
+ System.out.println("Created auth token: " + authToken.name());
+
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
+ }
+
+ // Create a client using the ephemeral auth token.
+ if (authToken == null || authToken.name() == null) {
+ System.out.println("No auth token created.");
+ System.exit(0);
+ }
+ Client clientWithAuthToken =
+ Client.builder()
+ .apiKey(authToken.name().orElse(null))
+ .httpOptions(HttpOptions.builder().apiVersion("v1alpha").build())
+ .build();
+
+ // Note that the system instruction here is ignored by the server. The system instruction was
+ // set and locked in the LiveConnectConstraints of the CreateAuthTokenConfig. Here we are just
+ // demonstrating that here. Other unlocked fields (like temperature) can be configured here.
+ LiveConnectConfig config =
+ LiveConnectConfig.builder()
+ .systemInstruction(
+ Content.fromParts(
+ Part.fromText("You are a pirate. Answer all questions like a pirate would.")))
+ .build();
+
+ CompletableFuture<Void> allDone = new CompletableFuture<>();
+
+ CompletableFuture<AsyncSession> futureSession =
+ clientWithAuthToken.async.live.connect(modelId, config);
+
+ futureSession
+ .thenCompose(
+ session -> {
+ String inputText = "What would you say if you are surprised?";
+ System.out.println("Connecting to live session...");
+ System.out.println(session.sessionId());
+ System.out.println("\n**Input**\n" + inputText);
+
+ return session
+ // Send the input message.
+ .sendClientContent(clientContentFromText(inputText))
+ .thenCompose(
+ unused -> {
+ System.out.print("\n**Response**\n");
+ // Receive messages from the live session.
+ return session.receive(message -> printLiveServerMessage(message, allDone));
+ })
+ // Wait for the allDone future to complete, which is signaled in
+ // printLiveServerMessage when the server is done sending messages.
+ .thenCompose(unused -> allDone)
+ // Close the session.
+ .thenCompose(unused -> session.close());
+ })
+ .join();
+ }
+
+ /** Wraps client message text. */
+ public static LiveSendClientContentParameters clientContentFromText(String text) {
+ return LiveSendClientContentParameters.builder()
+ .turnComplete(true)
+ .turns(Content.fromParts(Part.fromText(text)))
+ .build();
+ }
+
+ public static void printLiveServerMessage(
+ LiveServerMessage message, CompletableFuture<Void> allDone) {
+ // Extract and print text from the model.
+ message
+ .serverContent()
+ .flatMap(LiveServerContent::modelTurn)
+ .flatMap(Content::parts)
+ .ifPresent(parts -> parts.forEach(part -> part.text().ifPresent(System.out::print)));
+
+ // Check if the server's turn is complete and signal the allDone future if so.
+ if (message.serverContent().flatMap(LiveServerContent::turnComplete).orElse(false)) {
+ System.out.println("\n**End of turn, full message: **\n");
+ System.out.println(message);
+ System.out.println();
+ allDone.complete(null);
+ }
+ }
+
+ private LiveEphemeralTokenAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java
index 3844fe389fb..526e4250a8e 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java
@@ -77,14 +77,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-2.0-flash-live-001";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
// Configures live session and context window compression.
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java
index 9f7a3d500d1..de2f868c277 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java
@@ -75,14 +75,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-2.0-flash-live-001";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
LiveConnectConfig config =
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java
index 67a9a92caa3..25bbc8cb5a2 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java
@@ -70,11 +70,9 @@
public final class LiveTextConversationResumptionAsync {
public static void main(String[] args) {
- boolean containsModelId = false;
// Get the session handle from the command line, if provided
String sessionHandle = null;
if (args.length > 1) {
- containsModelId = true;
if (args[1].startsWith("--session_handle")) {
String[] parts = args[1].split("=", 2);
if (parts.length == 2) {
@@ -95,8 +93,6 @@ public static void main(String[] args) {
System.err.println("Usage: mvn ... --session_handle=<session_handle>");
System.exit(1);
}
- } else {
- containsModelId = true;
}
}
@@ -116,14 +112,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-2.0-flash-live-001";
- }
- if (containsModelId) {
+ final String modelId;
+ if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
SessionResumptionConfig.Builder sessionResumptionConfigBuilder =
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java
index 10dd6345833..b6cd68d3989 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java
@@ -80,14 +80,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-2.0-flash-live-001";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
// Sets the system instruction in the config.
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java
index adccf9b58e1..92c1a11fd0f 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java
@@ -73,14 +73,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-2.0-flash-live-001";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
LiveConnectConfig config =
@@ -94,6 +93,8 @@ public static void main(String[] args) {
.thenCompose(
session -> {
String inputText = "Write a short poem about a cat.";
+ System.out.println("Connecting to live session...");
+ System.out.println(session.sessionId());
System.out.println("\n**Input**\n" + inputText);
return session
@@ -133,6 +134,8 @@ public static void printLiveServerMessage(
// Check if the server's turn is complete and signal the allDone future if so.
if (message.serverContent().flatMap(LiveServerContent::turnComplete).orElse(false)) {
+ System.out.println("\n**End of turn, full message: **\n");
+ System.out.println(message);
System.out.println();
allDone.complete(null);
}
diff --git a/examples/src/main/java/com/google/genai/examples/LocalComputeTokens.java b/examples/src/main/java/com/google/genai/examples/LocalComputeTokens.java
new file mode 100644
index 00000000000..d142e31af94
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/LocalComputeTokens.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.LocalComputeTokens"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.LocalTokenizer;
+
+/** An example of using the Unified Gen AI Java SDK to compute tokens locally. */
+public class LocalComputeTokens {
+ public static void main(String[] args) {
+ LocalTokenizer tokenizer = new LocalTokenizer(Constants.GEMINI_MODEL_NAME);
+ System.out.println(
+ "Compute tokens for 'Hello world': " + tokenizer.computeTokens("Hello world").toJson());
+ }
+}
diff --git a/examples/src/main/java/com/google/genai/examples/LocalCountTokens.java b/examples/src/main/java/com/google/genai/examples/LocalCountTokens.java
new file mode 100644
index 00000000000..9e1c0fc425b
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/LocalCountTokens.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.LocalCountTokens"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.LocalTokenizer;
+
+/** An example of using the Unified Gen AI Java SDK to count tokens locally. */
+public class LocalCountTokens {
+ public static void main(String[] args) {
+ LocalTokenizer tokenizer = new LocalTokenizer(Constants.GEMINI_MODEL_NAME);
+ System.out.println(
+ "Count for 'Hello world': " + tokenizer.countTokens("Hello world").totalTokens());
+ }
+}
diff --git a/examples/src/main/java/com/google/genai/examples/ModelManagement.java b/examples/src/main/java/com/google/genai/examples/ModelManagement.java
index f6652492d88..0c51973a64e 100644
--- a/examples/src/main/java/com/google/genai/examples/ModelManagement.java
+++ b/examples/src/main/java/com/google/genai/examples/ModelManagement.java
@@ -49,13 +49,13 @@
public final class ModelManagement {
public static void main(String[] args) {
- if (args.length == 0) {
- System.out.println("Please provide a model ID on the -Dexec.args argument.");
- return;
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
- String modelId = args[0];
-
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
// environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
diff --git a/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java b/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java
index 58829301c54..a310cc28d68 100644
--- a/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java
@@ -51,13 +51,13 @@
public final class ModelManagementAsync {
public static void main(String[] args) {
- if (args.length == 0) {
- System.out.println("Please provide a model ID on the -Dexec.args argument.");
- return;
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
- String modelId = args[0];
-
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
// environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
diff --git a/examples/src/main/java/com/google/genai/examples/RecontextImageVirtualTryOn.java b/examples/src/main/java/com/google/genai/examples/RecontextImageVirtualTryOn.java
new file mode 100644
index 00000000000..28e12fb1ccc
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/RecontextImageVirtualTryOn.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.RecontextImageVirtualTryOn"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.ProductImage;
+import com.google.genai.types.RecontextImageConfig;
+import com.google.genai.types.RecontextImageResponse;
+import com.google.genai.types.RecontextImageSource;
+import java.util.ArrayList;
+
+/** An example of using the Unified Gen AI Java SDK to recontextualize an image (virtual try-on). */
+public final class RecontextImageVirtualTryOn {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.VIRTUAL_TRY_ON_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ Image productImagePants =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/pants.jpg").build();
+
+ Image personImage =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/man.jpg").build();
+
+ RecontextImageConfig recontextImageConfig =
+ RecontextImageConfig.builder().numberOfImages(1).outputMimeType("image/jpeg").build();
+
+ ArrayList<ProductImage> productImages = new ArrayList<>();
+ ProductImage productImage = ProductImage.builder().productImage(productImagePants).build();
+ productImages.add(productImage);
+
+ RecontextImageSource recontextImageSource =
+ RecontextImageSource.builder()
+ .personImage(personImage)
+ .productImages(productImages)
+ .build();
+
+ RecontextImageResponse recontextImageResponse =
+ client.models.recontextImage(modelId, recontextImageSource, recontextImageConfig);
+
+ Image generatedImage = recontextImageResponse.generatedImages().get().get(0).image().get();
+ // Do something with generatedImage.
+ }
+
+ private RecontextImageVirtualTryOn() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/RegisterFiles.java b/examples/src/main/java/com/google/genai/examples/RegisterFiles.java
new file mode 100644
index 00000000000..5a1a80a853b
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/RegisterFiles.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.genai.examples;
+
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.genai.Client;
+import com.google.genai.types.Content;
+import com.google.genai.types.File;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Part;
+import com.google.genai.types.RegisterFilesResponse;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * An example of how to use the registerFiles method to register GCS files with the Gemini Developer
+ * API.
+ */
+public final class RegisterFiles {
+ public static void main(String[] args) throws IOException {
+ // Instantiate the client. The client by default uses the Gemini Developer API.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("registerFiles is only supported in the Gemini Developer client.");
+ System.exit(0);
+ }
+
+ // GoogleCredentials.getApplicationDefault() will use application default credentials.
+ // Note: registerFiles is only supported by the Gemini Developer API (MLDev), not Vertex AI.
+ GoogleCredentials credentials =
+ GoogleCredentials.getApplicationDefault()
+ .createScoped(
+ Arrays.asList(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/devstorage.read_only"));
+
+ List<String> uris = Arrays.asList("gs://tensorflow_docs/image.jpg");
+
+ RegisterFilesResponse response = client.files.registerFiles(credentials, uris, null);
+
+ List<File> files =
+ response.files().orElseThrow(() -> new RuntimeException("No files returned"));
+ File file = files.get(0);
+
+ System.out.println("Registered file: " + file.uri().get());
+
+ // Use the registered file in a generateContent call.
+ Content content =
+ Content.fromParts(
+ Part.fromText("can you summarize this file?"),
+ Part.fromUri(file.uri().get(), file.mimeType().get()));
+
+ GenerateContentResponse genResponse =
+ client.models.generateContent("gemini-2.5-flash", content, null);
+
+ System.out.println("Response: " + genResponse.text());
+ }
+
+ private RegisterFiles() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java b/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java
index 5ba7dd1e712..d21ecee4850 100644
--- a/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java
+++ b/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java
@@ -47,13 +47,16 @@
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.GenerateContentResponse;
import com.google.genai.types.HttpOptions;
+import com.google.genai.types.HttpRetryOptions;
/** An example of setting http options at request level. */
public final class RequestLevelHttpOptions {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,10 +75,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- // Set a customized header per request config.
+ // Set a customized header and retry options per request config.
GenerateContentConfig config =
GenerateContentConfig.builder()
- .httpOptions(HttpOptions.builder().headers(ImmutableMap.of("my-header", "my-value")))
+ .httpOptions(
+ HttpOptions.builder()
+ .headers(ImmutableMap.of("my-header", "my-value"))
+ .retryOptions(HttpRetryOptions.builder().attempts(3).httpStatusCodes(408, 429)))
.build();
GenerateContentResponse response =
diff --git a/examples/src/main/java/com/google/genai/examples/SegmentImage.java b/examples/src/main/java/com/google/genai/examples/SegmentImage.java
new file mode 100644
index 00000000000..9adcf6f93a3
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/SegmentImage.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.SegmentImage"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.SegmentImageConfig;
+import com.google.genai.types.SegmentImageResponse;
+import com.google.genai.types.SegmentImageSource;
+import com.google.genai.types.SegmentMode;
+
+/** An example of using the Unified Gen AI Java SDK to segment an image. */
+public final class SegmentImage {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.SEGMENT_IMAGE_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ // Base image created using generateImages with prompt:
+ // "A square, circle, and triangle with a white background"
+ Image image = Image.fromFile("./resources/shapes.jpg");
+
+ // Control reference.
+ SegmentImageConfig segmentImageConfig =
+ SegmentImageConfig.builder().mode(SegmentMode.Known.FOREGROUND).build();
+
+ SegmentImageResponse segmentImageResponse =
+ client.models.segmentImage(
+ modelId, SegmentImageSource.builder().image(image).build(), segmentImageConfig);
+
+ Image maskImage = segmentImageResponse.generatedMasks().get().get(0).mask().get();
+ // Do something with maskImage.
+ }
+
+ private SegmentImage() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/SegmentImageAsync.java b/examples/src/main/java/com/google/genai/examples/SegmentImageAsync.java
new file mode 100644
index 00000000000..32ffead7e76
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/SegmentImageAsync.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.SegmentImageAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.SegmentImageConfig;
+import com.google.genai.types.SegmentImageResponse;
+import com.google.genai.types.SegmentImageSource;
+import com.google.genai.types.SegmentMode;
+import java.util.concurrent.CompletableFuture;
+
+/** An example of using the Unified Gen AI Java SDK to segment an image asynchronously. */
+public final class SegmentImageAsync {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.SEGMENT_IMAGE_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ // Base image created using generateImages with prompt:
+ // "A square, circle, and triangle with a white background"
+ Image image = Image.fromFile("./resources/shapes.jpg");
+
+ // Control reference.
+ SegmentImageConfig segmentImageConfig =
+ SegmentImageConfig.builder().mode(SegmentMode.Known.FOREGROUND).build();
+
+ CompletableFuture<SegmentImageResponse> segmentImageResponseFuture =
+ client.async.models.segmentImage(
+ modelId, SegmentImageSource.builder().image(image).build(), segmentImageConfig);
+
+ segmentImageResponseFuture
+ .thenAccept(
+ segmentImageResponse -> {
+ Image maskImage = segmentImageResponse.generatedMasks().get().get(0).mask().get();
+ // Do something with maskImage.
+ })
+ .join();
+ }
+
+ private SegmentImageAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/TuningJobs.java b/examples/src/main/java/com/google/genai/examples/TuningJobs.java
new file mode 100644
index 00000000000..05f323a6164
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/TuningJobs.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.TuningJobs"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.ListTuningJobsConfig;
+import com.google.genai.types.TuningDataset;
+import com.google.genai.types.TuningJob;
+
+/** An example of using the Unified Gen AI Java SDK to do operations on tuning jobs. */
+public final class TuningJobs {
+
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ // Create a tuning job.
+ TuningDataset tuningDataset =
+ TuningDataset.builder()
+ .gcsUri(
+ "gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl")
+ .build();
+ TuningJob tuningJob1 = client.tunings.tune(modelId, tuningDataset, null);
+ System.out.println("Created tuning job: " + tuningJob1);
+ // Get the tuning job by name.
+ TuningJob tuningJob2 = client.tunings.get(tuningJob1.name().get(), null);
+ System.out.println("Get tuning job: " + tuningJob2);
+
+ // Wait for the tuned model to be available.
+ String tunedModel = "";
+ while (tunedModel.isEmpty()) {
+ System.out.println("Waiting for tuned model to be available");
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ // Get the tuning job.
+ TuningJob fetchedTuningJob = client.tunings.get(tuningJob1.name().get(), null);
+ if (fetchedTuningJob.tunedModel().isPresent()
+ && fetchedTuningJob.tunedModel().get().model().isPresent()) {
+ tunedModel = fetchedTuningJob.tunedModel().get().model().get();
+ }
+ }
+ System.out.println("Tuned model: " + tunedModel);
+ System.out.println();
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // List tuning jobs.
+ System.out.println("List tuning jobs resource names: ");
+ for (TuningJob t :
+ client.tunings.list(ListTuningJobsConfig.builder().pageSize(5).build()).page()) {
+ System.out.println(t.name().get());
+ System.out.println(t.state().get());
+ }
+ }
+
+ private TuningJobs() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java b/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java
new file mode 100644
index 00000000000..442a062ba2a
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.TuningJobsAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.AsyncPager;
+import com.google.genai.Client;
+import com.google.genai.types.ListTuningJobsConfig;
+import com.google.genai.types.TuningDataset;
+import com.google.genai.types.TuningJob;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/** An example of using the Unified Gen AI Java SDK to do async operations on tuning jobs. */
+public final class TuningJobsAsync {
+
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+ ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
+
+ try {
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ // Create a tuning job.
+ TuningDataset tuningDataset =
+ TuningDataset.builder()
+ .gcsUri(
+ "gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl")
+ .build();
+ CompletableFuture tuningJob1Future =
+ client.async.tunings.tune(modelId, tuningDataset, null);
+ tuningJob1Future
+ .thenCompose(
+ tuningJob -> {
+ System.out.println("Created tuning job: " + tuningJob.name().get());
+ return pollUntilComplete(tuningJob.name().get(), client, scheduler);
+ })
+ .thenAccept(
+ finalJob -> {
+ String tunedModel = finalJob.tunedModel().get().model().get();
+ System.out.println("Tuned model: " + tunedModel);
+ })
+ .join();
+ System.out.println();
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // List tuning jobs.
+ CompletableFuture> asyncPagerFuture =
+ client.async.tunings.list(ListTuningJobsConfig.builder().pageSize(5).build());
+ asyncPagerFuture
+ .thenCompose(
+ asyncPager -> {
+ System.out.println("List tuning jobs resource names: ");
+ return asyncPager.page();
+ })
+ .thenAccept(
+ page -> {
+ page.forEach(
+ job -> System.out.println(job.name().get() + "\n" + job.state().get()));
+ })
+ .join();
+ } finally {
+ scheduler.shutdown();
+ }
+ }
+
+ @SuppressWarnings("FutureReturnValueIgnored")
+ // Polls the tuning job status asynchronously until it is complete.
+ private static CompletableFuture pollUntilComplete(
+ String jobName, Client client, ScheduledExecutorService scheduler) {
+
+ return client
+ .async
+ .tunings
+ .get(jobName, null)
+ .thenCompose(
+ job -> {
+ // Check if the model is ready.
+ if (job.tunedModel().isPresent() && job.tunedModel().get().model().isPresent()) {
+ return CompletableFuture.completedFuture(job);
+ } else {
+ // The job is not done. Schedule the next poll.
+ System.out.println(
+ "Waiting for tuned model to be available... Current state: "
+ + job.state().get());
+ CompletableFuture result = new CompletableFuture<>();
+ // Schedule the next call to this same method after a 10-second delay.
+ scheduler.schedule(
+ () ->
+ pollUntilComplete(jobName, client, scheduler)
+ .thenAccept(result::complete)
+ .exceptionally(
+ ex -> {
+ result.completeExceptionally(ex);
+ return null;
+ }),
+ 10,
+ TimeUnit.SECONDS);
+ return result;
+ }
+ });
+ }
+
+ private TuningJobsAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/TuningJobsWithEvaluationConfig.java b/examples/src/main/java/com/google/genai/examples/TuningJobsWithEvaluationConfig.java
new file mode 100644
index 00000000000..5751172e116
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/TuningJobsWithEvaluationConfig.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.TuningJobs"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.genai.Client;
+import com.google.genai.types.AutoraterConfig;
+import com.google.genai.types.CreateTuningJobConfig;
+import com.google.genai.types.EvaluationConfig;
+import com.google.genai.types.GcsDestination;
+import com.google.genai.types.ListTuningJobsConfig;
+import com.google.genai.types.UnifiedMetric;
+import com.google.genai.types.BleuSpec;
+import com.google.genai.types.OutputConfig;
+import com.google.genai.types.TuningDataset;
+import com.google.genai.types.TuningJob;
+import com.google.genai.types.TuningValidationDataset;
+import com.google.genai.types.HttpOptions;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to do operations on tuning jobs with an
+ * evaluation config.
+ */
+public final class TuningJobsWithEvaluationConfig {
+
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+
+ // Tuning is currently only supported in v1beta1.
+ HttpOptions httpOptions = HttpOptions.builder().apiVersion("v1beta1").build();
+ Client client = Client.builder().httpOptions(httpOptions).build();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ // Create a tuning job.
+ TuningDataset tuningDataset =
+ TuningDataset.builder()
+ .gcsUri(
+ "gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl")
+ .build();
+
+ UnifiedMetric bleu =
+ UnifiedMetric.builder()
+ .bleuSpec(BleuSpec.builder().useEffectiveOrder(true).build())
+ .build();
+
+ ImmutableList metrics = ImmutableList.of(bleu);
+
+ EvaluationConfig evaluationConfig =
+ EvaluationConfig.builder()
+ .outputConfig(
+ OutputConfig.builder()
+ .gcsDestination(
+ GcsDestination.builder().outputUriPrefix("gs://YOUR_GCS_BUCKET/").build())
+ .build())
+ .autoraterConfig(
+ AutoraterConfig.builder().autoraterModel("test-model").samplingCount(1).build())
+ .metrics(metrics)
+ .build();
+
+ CreateTuningJobConfig tuningConfig =
+ CreateTuningJobConfig.builder()
+ .epochCount(1)
+ .tunedModelDisplayName("tuning job with eval config")
+ .validationDataset(
+ TuningValidationDataset.builder()
+ .gcsUri(
+ "gs://cloud-samples-data/ai-platform/generative_ai/gemini-2_0/text/sft_validation_data.jsonl")
+ .build())
+ .evaluationConfig(evaluationConfig)
+ .build();
+
+ TuningJob tuningJob1 = client.tunings.tune(modelId, tuningDataset, tuningConfig);
+ System.out.println("Created tuning job: " + tuningJob1);
+
+ // Wait for the tuned model to be available.
+ String tunedModel = "";
+ while (tunedModel.isEmpty()) {
+ System.out.println("Waiting for tuned model to be available");
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ // Get the tuning job.
+ TuningJob fetchedTuningJob = client.tunings.get(tuningJob1.name().get(), null);
+ if (fetchedTuningJob.tunedModel().isPresent()
+ && fetchedTuningJob.tunedModel().get().model().isPresent()) {
+ tunedModel = fetchedTuningJob.tunedModel().get().model().get();
+ }
+ }
+ System.out.println("Tuned model: " + tunedModel);
+ System.out.println();
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ // List tuning jobs.
+ System.out.println("List tuning jobs resource names: ");
+ for (TuningJob t :
+ client.tunings.list(ListTuningJobsConfig.builder().pageSize(5).build()).page()) {
+ System.out.println(t.name().get());
+ System.out.println(t.state().get());
+ }
+ }
+
+ private TuningJobsWithEvaluationConfig() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/UpscaleImage.java b/examples/src/main/java/com/google/genai/examples/UpscaleImage.java
index ef726d4c105..a7fa7393561 100644
--- a/examples/src/main/java/com/google/genai/examples/UpscaleImage.java
+++ b/examples/src/main/java/com/google/genai/examples/UpscaleImage.java
@@ -48,9 +48,11 @@
/** An example of using the Unified Gen AI Java SDK to upscale an image. */
public final class UpscaleImage {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -66,7 +68,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java b/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java
index 0f7e136bf3b..df3e099c881 100644
--- a/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to upscale an image asynchronously. */
public final class UpscaleImageAsync {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -67,7 +69,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/pom.xml b/pom.xml
index 3de1d844970..19345fb54fe 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
com.google.genaigoogle-genaigoogle-genai
- 1.6.0-SNAPSHOT
+ 1.54.0-SNAPSHOTjar
Java idiomatic SDK for the Gemini Developer APIs and Vertex AI APIs.
@@ -47,14 +47,16 @@
1.81.81.33.0
- 2.47.0
+ 3.25.5
+ 2.45.04.5.14
- 4.4.161.11.02.17.25.11.4
- 3.12.41.6.0
+ 4.12.0
+ src/main/java
+ src/test/java
@@ -90,16 +92,8 @@
org.apache.httpcomponentshttpclient${apache.httpcomponents.httpclient.version}
-
-
- org.apache.httpcomponents
- httpcore
- ${apache.httpcomponents.httpcore.version}
-
-
- com.google.auto.value
- auto-value
- ${auto-value.version}
+
+ truecom.google.auto.value
@@ -131,6 +125,16 @@
Java-WebSocket${java-websocket.version}
+
+ com.squareup.okhttp3
+ okhttp
+ ${okhttp.version}
+
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+ org.junit.jupiter
@@ -150,19 +154,6 @@
${junit.version}test
-
- org.junit.jupiter
- junit-jupiter-params
- ${junit.version}
- test
-
-
-
- org.mockito
- mockito-core
- ${mockito.version}
- test
- org.jspecify
@@ -170,14 +161,22 @@
1.0.0
- com.github.tomakehurst
- wiremock-jre8
- 2.35.0
- test
+ org.jetbrains.kotlin
+ kotlin-stdlib-jdk8
+ 1.9.10
+ ${project.basedir}/${main.java.src.dir}
+ ${project.basedir}/${test.java.src.dir}
+
+
+ kr.motd.maven
+ os-maven-plugin
+ 1.7.1
+
+
@@ -201,6 +200,9 @@
${auto-value.version}
+
+ -parameters
+
@@ -208,20 +210,27 @@
3.5.2
-
- me.fabriciorby
- maven-surefire-junit5-tree-reporter
- 0.1.0
-
+
+ me.fabriciorby
+ maven-surefire-junit5-tree-reporter
+ 0.1.0
+
- plain
-
+ plain
+ maven-jar-plugin
- 3.0.2
+ 3.3.0
+
+
+
+ com.google.genai
+
+
+ maven-install-plugin
@@ -267,16 +276,6 @@
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 3.14.0
-
-
- -parameters
-
-
- org.apache.maven.pluginsmaven-javadoc-plugin
@@ -321,6 +320,7 @@
testreport
+ check
@@ -328,11 +328,236 @@
com/google/genai/types/AutoValue_*.class
+ com/google/genai/shaded/**/*.class
+
+
+ PACKAGE
+
+ com.google.genai
+
+
+
+ INSTRUCTION
+ COVEREDRATIO
+
+ 0.70
+
+
+
+
+ PACKAGE
+
+ com.google.genai.errors
+
+
+
+ INSTRUCTION
+ COVEREDRATIO
+ 0.95
+
+
+
+
+ PACKAGE
+
+ com.google.genai.types
+
+
+
+ INSTRUCTION
+ COVEREDRATIO
+
+ 0.40
+
+
+
+
+
+
+
+
+
+ org.xolstice.maven.plugins
+ protobuf-maven-plugin
+ 0.6.1
+
+ com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
+ ${project.basedir}/src/main/proto
+
+
+
+
+ compile
+ test-compile
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.6.0
+
+
+ package
+
+ shade
+
+
+
+
+ org.jetbrains.kotlin:kotlin-stdlib-jdk8
+ org.jetbrains.kotlin:kotlin-stdlib
+ org.jetbrains.kotlin:kotlin-reflect
+ com.fasterxml.jackson.module:jackson-module-kotlin
+
+
+
+
+
+
+
+ kotlin.
+ com.google.genai.shaded.kotlin.
+
+
+ com.fasterxml.jackson.module.kotlin.
+ com.google.genai.shaded.jackson.module.kotlin.
+
+
+
+
+ *:*
+
+ META-INF/*.SF
+ META-INF/*.DSA
+ META-INF/*.RSA
+
+
+
+
+
+
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 3.5.0
+
+
+ add-source
+ generate-sources
+
+ add-source
+
+
+
+ ${project.build.directory}/generated-sources/protobuf/java
+
+
+
+ jdk8-build
+
+ [1.8,1.9)
+
+
+
+ org.mockito
+ mockito-inline
+ 3.12.4
+ test
+
+
+
+
+
+ modern-jdk-build
+
+ [11,)
+
+
+
+ org.mockito
+ mockito-core
+ 5.12.0
+ test
+
+
+
+
+
+ native-tests
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+ @{argLine} -agentlib:native-image-agent=config-output-dir=target/native-image
+
+
+
+ org.graalvm.buildtools
+ native-maven-plugin
+ 0.11.1
+ true
+
+
+ --no-fallback
+ --verbose
+
+
+
+
+
+
+
+
+ private
+
+ src/private/java
+ src/private/test/java
+ true
+
+
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+
+
+
+ add-public-test-source
+ generate-test-sources
+
+ add-test-source
+
+
+
+ ${project.basedir}/src/test/java
+
+
+
+
+
+
+ maven-compiler-plugin
+
+
+ **/HttpApiClientTest.java
+ **/GoogleCredentialsHttpClientTest.java
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/release-please-config.json b/release-please-config.json
index ce2b1c767ba..176e73c2c6f 100644
--- a/release-please-config.json
+++ b/release-please-config.json
@@ -1,17 +1,15 @@
{
"$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
- "release-type": "maven",
"versioning": "always-bump-minor",
"separate-pull-requests": true,
"include-component-in-tag": false,
- "initial-version": "0.2.0-SNAPSHOT",
- "extra-files": ["README.md", "src/main/java/com/google/genai/ApiClient.java"],
+ "bump-minor-pre-major": false,
+ "bump-patch-for-minor-pre-major": false,
"packages": {
".": {
- "changelog-path": "CHANGELOG.md",
- "release-type": "maven",
- "bump-minor-pre-major": false,
- "bump-patch-for-minor-pre-major": false
+ "release-type": "java-yoshi",
+ "extra-files": ["README.md", "src/main/java/com/google/genai/ApiClient.java"],
+ "changelog-path": "CHANGELOG.md"
}
}
}
\ No newline at end of file
diff --git a/releases.txt b/releases.txt
index 7b9de251a8c..d80ca429ea8 100644
--- a/releases.txt
+++ b/releases.txt
@@ -1,4 +1,4 @@
-Use this file when you need to force a patch release with release-please.
-Edit line 4 below with the version for the release.
+To trigger a manual patch release via release-please, edit the version on
+line 4.
-1.4.1
+1.50.1
diff --git a/run_interactions_examples.sh b/run_interactions_examples.sh
new file mode 100755
index 00000000000..e4b84dd2256
--- /dev/null
+++ b/run_interactions_examples.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+# Script to run Java GenAI SDK Interaction examples using Maven.
+
+ORIGINAL_DIR=$(pwd)
+
+declare -A FAILED_EXAMPLES
+
+# --- Install the package if needed ---
+if [ -z "$(find target -name 'google-genai-*.jar')" ]; then
+ echo "Project JAR not found in target/. Running 'mvn install'..."
+ mvn install -Dclirr.skip=true
+else
+ echo "Project JAR found. Skipping 'mvn install'."
+fi
+
+cd examples
+echo "Compiling the examples..."
+mvn compile
+if [ $? -ne 0 ]; then
+ echo "----------------------------------------"
+ echo "ERROR: Maven compilation failed. Exiting."
+ echo "----------------------------------------"
+ cd "$ORIGINAL_DIR"
+ exit 1
+fi
+
+declare -a TARGETS=()
+for file in src/main/java/com/google/genai/examples/Interaction*.java; do
+ if [ -f "$file" ]; then
+ TARGETS+=("$(basename "$file" .java)")
+ fi
+done
+
+if [ ${#TARGETS[@]} -eq 0 ]; then
+ echo "No interaction examples found matching Interaction*.java"
+ cd "$ORIGINAL_DIR"
+ exit 0
+fi
+
+for target in "${TARGETS[@]}"; do
+ echo "========================================"
+ echo "Running: $target"
+ echo "========================================"
+
+ mvn exec:java -Dexec.mainClass="com.google.genai.examples.$target"
+
+ if [ $? -ne 0 ]; then
+ echo "ERROR: $target failed."
+ FAILED_EXAMPLES["$target"]="Failed"
+ else
+ echo "SUCCESS: $target completed."
+ fi
+done
+
+cd "$ORIGINAL_DIR"
+
+echo "========================================"
+if [ ${#FAILED_EXAMPLES[@]} -eq 0 ]; then
+ echo "All interaction examples passed! ✅"
+ exit 0
+else
+ echo "The following examples failed: ❌"
+ for failed in "${!FAILED_EXAMPLES[@]}"; do
+ echo " - $failed"
+ done
+ exit 1
+fi
diff --git a/src/main/java/com/google/genai/AfcUtil.java b/src/main/java/com/google/genai/AfcUtil.java
index bd278753aff..8ff11ce0ac8 100644
--- a/src/main/java/com/google/genai/AfcUtil.java
+++ b/src/main/java/com/google/genai/AfcUtil.java
@@ -69,6 +69,26 @@ static GenerateContentConfig transformGenerateContentConfig(GenerateContentConfi
return transformedConfig;
}
+ static ImmutableList findAfcIncompatibleToolIndexes(GenerateContentConfig config) {
+ if (config == null || !config.tools().isPresent() || config.tools().get().isEmpty()) {
+ return ImmutableList.of();
+ }
+ List tools = config.tools().get();
+ ImmutableList.Builder incompatibleToolsIndexesBuilder = ImmutableList.builder();
+
+ for (int i = 0; i < tools.size(); i++) {
+ Tool tool = tools.get(i);
+ if (tool.functionDeclarations().isPresent() && !tool.functionDeclarations().get().isEmpty()) {
+ incompatibleToolsIndexesBuilder.add(i);
+ }
+ if (tool.mcpServers().isPresent() && !tool.mcpServers().get().isEmpty()) {
+ incompatibleToolsIndexesBuilder.add(i);
+ }
+ }
+
+ return incompatibleToolsIndexesBuilder.build();
+ }
+
static ImmutableMap getFunctionMap(GenerateContentConfig config) {
ImmutableMap.Builder functionMapBuilder = ImmutableMap.builder();
if (config != null && config.tools().isPresent() && !config.tools().get().isEmpty()) {
@@ -200,28 +220,26 @@ private static Object getFunctionResponse(
+ argsFromModel);
}
Object argValueFromModel = argsFromModel.get(parameterName);
- switch (argValueFromModel.getClass().getName()) {
- case "java.lang.String":
- argsListFromModel.add(argValueFromModel);
- break;
- case "java.lang.Integer":
- argsListFromModel.add(Integer.parseInt(argValueFromModel.toString()));
- break;
- case "java.lang.Double":
- argsListFromModel.add(Double.parseDouble(argValueFromModel.toString()));
- break;
- case "java.lang.Float":
- argsListFromModel.add(Float.parseFloat(argValueFromModel.toString()));
- break;
- case "java.lang.Boolean":
- argsListFromModel.add(Boolean.parseBoolean(argValueFromModel.toString()));
- break;
- default:
- throw new IllegalArgumentException(
- "The value type of the parameter \""
- + parameterName
- + "\" is not supported. Supported types are String, Integer, Double, Float, and"
- + " Boolean.");
+ String className = argValueFromModel.getClass().getName();
+
+ if (className.equals("java.lang.String")) {
+ argsListFromModel.add(argValueFromModel);
+ } else if (className.equals("java.lang.Integer")) {
+ argsListFromModel.add(Integer.parseInt(argValueFromModel.toString()));
+ } else if (className.equals("java.lang.Double")) {
+ argsListFromModel.add(Double.parseDouble(argValueFromModel.toString()));
+ } else if (className.equals("java.lang.Float")) {
+ argsListFromModel.add(Float.parseFloat(argValueFromModel.toString()));
+ } else if (className.equals("java.lang.Boolean")) {
+ argsListFromModel.add(Boolean.parseBoolean(argValueFromModel.toString()));
+ } else if (argValueFromModel instanceof List) {
+ argsListFromModel.add(argValueFromModel);
+ } else {
+ throw new IllegalArgumentException(
+ "The value type of the parameter \""
+ + parameterName
+ + "\" is not supported. Supported types are String, Integer, Double, Float,"
+ + " Boolean, and List.");
}
}
diff --git a/src/main/java/com/google/genai/ApiClient.java b/src/main/java/com/google/genai/ApiClient.java
index 9693526fe33..67321d21c18 100644
--- a/src/main/java/com/google/genai/ApiClient.java
+++ b/src/main/java/com/google/genai/ApiClient.java
@@ -17,39 +17,68 @@
package com.google.genai;
import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.collect.ImmutableMap.toImmutableMap;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.api.core.InternalApi;
import com.google.auth.oauth2.GoogleCredentials;
+import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
import com.google.genai.errors.GenAiIOException;
import com.google.genai.types.ClientOptions;
import com.google.genai.types.HttpOptions;
+import com.google.genai.types.HttpRetryOptions;
+import com.google.genai.types.ProxyOptions;
+import com.google.genai.types.ProxyType;
+import com.google.genai.types.ResourceScope.Known;
import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.Proxy;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.time.Duration;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
import java.util.logging.Logger;
import java.util.stream.Stream;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import okhttp3.Credentials;
+import okhttp3.Dispatcher;
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
import org.jspecify.annotations.Nullable;
/** Interface for an API client which issues HTTP requests to the GenAI APIs. */
-abstract class ApiClient {
+@InternalApi
+public abstract class ApiClient implements AutoCloseable {
// {x-version-update-start:google-genai:released}
- private static final String SDK_VERSION = "1.5.0";
+ private static final String SDK_VERSION = "1.53.0";
// {x-version-update-end:google-genai:released}
private static final Logger logger = Logger.getLogger(ApiClient.class.getName());
- CloseableHttpClient httpClient;
+ private static final ImmutableSet METHODS_WITH_BODY =
+ ImmutableSet.of("POST", "PATCH", "PUT");
+
+ private static final ImmutableSet VALID_HTTP_METHODS =
+ ImmutableSet.builder().addAll(METHODS_WITH_BODY).add("GET").add("DELETE").build();
+
+ private static final ImmutableSet MULTI_REGIONAL_LOCATIONS = ImmutableSet.of("us", "eu");
+
+ private static Optional geminiBaseUrl = Optional.empty();
+ private static Optional vertexBaseUrl = Optional.empty();
+
+ final OkHttpClient httpClient;
HttpOptions httpOptions;
final boolean vertexAI;
final Optional clientOptions;
- // For Gemini APIs
+ final Optional customBaseUrl;
+ // For Google AI APIs
final Optional apiKey;
// For Vertex AI APIs
final Optional project;
@@ -79,102 +108,428 @@ protected ApiClient(
this.credentials = Optional.empty();
this.vertexAI = false;
this.clientOptions = clientOptions;
+ this.customBaseUrl =
+ customHttpOptions.flatMap(HttpOptions::baseUrl).map(url -> url.replaceAll("/$", ""));
- this.httpOptions = defaultHttpOptions(/* vertexAI= */ false, this.location);
+ this.httpOptions =
+ defaultHttpOptions(
+ /* vertexAI= */ false, this.location, this.apiKey, this.customBaseUrl, Optional.empty());
if (customHttpOptions.isPresent()) {
this.httpOptions = mergeHttpOptions(customHttpOptions.get());
}
- this.httpClient = createHttpClient(httpOptions.timeout(), clientOptions);
+ this.httpClient = createHttpClient(httpOptions, clientOptions);
}
ApiClient(
+ Optional<String> apiKey,
Optional<String> project,
Optional<String> location,
Optional<GoogleCredentials> credentials,
Optional<HttpOptions> customHttpOptions,
Optional<ClientOptions> clientOptions) {
+ checkNotNull(apiKey, "API Key cannot be null");
checkNotNull(project, "project cannot be null");
checkNotNull(location, "location cannot be null");
checkNotNull(credentials, "credentials cannot be null");
checkNotNull(customHttpOptions, "customHttpOptions cannot be null");
checkNotNull(clientOptions, "clientOptions cannot be null");
- try {
- this.project = Optional.of(project.orElse(System.getenv("GOOGLE_CLOUD_PROJECT")));
- } catch (NullPointerException e) {
+ ImmutableMap<String, String> environmentVariables = defaultEnvironmentVariables();
+
+ // Retrieve implicitly set values from the environment.
+ String envApiKeyValue = getApiKeyFromEnv();
+ String envProjectValue = environmentVariables.get("project");
+ String envLocationValue = environmentVariables.get("location");
+
+ // Constructor arguments take priority over environment variables.
+ String apiKeyValue = apiKey.orElse(envApiKeyValue);
+ String projectValue = project.orElse(envProjectValue);
+ String locationValue = location.orElse(envLocationValue);
+
+ // Has environment variable values.
+ boolean hasEnvApiKeyValue = envApiKeyValue != null && !envApiKeyValue.isEmpty();
+ boolean hasEnvProjectValue = envProjectValue != null && !envProjectValue.isEmpty();
+ boolean hasEnvLocationValue = envLocationValue != null && !envLocationValue.isEmpty();
+
+ // Constructor arguments.
+ boolean hasApiKey = apiKey != null && apiKey.isPresent();
+ boolean hasCredentials = credentials != null && credentials.isPresent();
+ boolean hasProject = project != null && project.isPresent();
+ boolean hasLocation = location != null && location.isPresent();
+
+ Optional<String> customBaseUrl =
+ customHttpOptions.flatMap(HttpOptions::baseUrl).map(url -> url.replaceAll("/$", ""));
+
+ // Validate constructor arguments combinations.
+ if (hasProject && hasApiKey) {
throw new IllegalArgumentException(
- "Project must either be provided or set in the environment variable"
- + " GOOGLE_CLOUD_PROJECT.",
- e);
+ "For Vertex AI APIs, project and API key are mutually exclusive in the client"
+ + " initializer. Please provide only one of them.");
}
- if (this.project.get().isEmpty()) {
- throw new IllegalArgumentException("Project must not be empty.");
+
+ if (hasLocation && hasApiKey) {
+ throw new IllegalArgumentException(
+ "For Vertex AI APIs, location and API key are mutually exclusive in the client"
+ + " initializer. Please provide only one of them.");
}
- try {
- this.location = Optional.of(location.orElse(System.getenv("GOOGLE_CLOUD_LOCATION")));
- } catch (NullPointerException e) {
+ if (hasCredentials && hasApiKey) {
throw new IllegalArgumentException(
- "Location must either be provided or set in the environment variable"
- + " GOOGLE_CLOUD_LOCATION.",
- e);
+ "For Vertex AI APIs, API key cannot be set together with credentials. Please provide"
+ + " only one of them.");
+ }
+
+ // Handle when to use Vertex AI in express mode (api key).
+ // Explicit initializer arguments are already validated above.
+ if (hasCredentials && hasEnvApiKeyValue) {
+ logger.warning(
+ "Warning: The user provided Google Cloud credentials will take precedence over the API"
+ + " key from the environment variable.");
+ apiKeyValue = null;
}
- if (this.location.get().isEmpty()) {
- throw new IllegalArgumentException("Location must not be empty.");
+ if (hasApiKey && (hasEnvProjectValue || hasEnvLocationValue)) {
+ // Explicit API key takes precedence over implicit project/location.
+ logger.warning(
+ "Warning: The user provided Vertex AI API key will take precedence over the"
+ + " project/location from the environment variables.");
+ projectValue = null;
+ locationValue = null;
+ } else if ((hasProject || hasLocation) && hasEnvApiKeyValue) {
+ // Explicit project/location takes precedence over implicit API key.
+ logger.warning(
+ "Warning: The user provided project/location will take precedence over the API key from"
+ + " the environment variable.");
+ apiKeyValue = null;
+ } else if ((hasEnvProjectValue || hasEnvLocationValue) && hasEnvApiKeyValue) {
+ // Implicit project/location takes precedence over implicit API key.
+ logger.warning(
+ "Warning: The project/location from the environment variables will take precedence over"
+ + " the API key from the environment variable.");
+ apiKeyValue = null;
}
- this.credentials = Optional.of(credentials.orElseGet(() -> defaultCredentials()));
+ if (locationValue == null && apiKeyValue == null && !customBaseUrl.isPresent()) {
+ locationValue = "global";
+ } else if (locationValue == null
+ && apiKeyValue == null
+ && customBaseUrl.isPresent()
+ && customBaseUrl.get().endsWith(".googleapis.com")) {
+ locationValue = "global";
+ }
+
+ boolean hasSufficientAuth =
+ (projectValue != null && locationValue != null) || apiKeyValue != null;
+ if (!hasSufficientAuth && !customBaseUrl.isPresent()) {
+ throw new IllegalArgumentException(
+ "Authentication is not set up. Please provide either a project and location, or an API"
+ + " key, or a custom base URL.");
+ }
+
+ boolean hasConstructorAuth = (hasProject && hasLocation) || hasApiKey;
+ HttpOptions.Builder initHttpOptionsBuilder = HttpOptions.builder();
+ if (customBaseUrl.isPresent() && !hasConstructorAuth) {
+ initHttpOptionsBuilder.baseUrl(customBaseUrl.get());
+ projectValue = null;
+ locationValue = null;
+ } else if (apiKeyValue != null
+ || (locationValue != null
+ && locationValue.equals("global")
+ && !customBaseUrl.isPresent())) {
+ initHttpOptionsBuilder.baseUrl("https://aiplatform.googleapis.com");
+ } else if (locationValue != null
+ && MULTI_REGIONAL_LOCATIONS.contains(locationValue)
+ && !customBaseUrl.isPresent()) {
+ initHttpOptionsBuilder.baseUrl(
+ String.format("https://aiplatform.%s.rep.googleapis.com", locationValue));
+ } else if (locationValue != null && !customBaseUrl.isPresent()) {
+ initHttpOptionsBuilder.baseUrl(
+ String.format("https://%s-aiplatform.googleapis.com", locationValue));
+ }
+
+ this.apiKey = Optional.ofNullable(apiKeyValue);
+ this.project = Optional.ofNullable(projectValue);
+ this.location = Optional.ofNullable(locationValue);
+ this.customBaseUrl = customBaseUrl;
+
+ // Only set credentials if using project/location.
+ this.credentials =
+ !this.project.isPresent()
+ ? Optional.empty()
+ : Optional.of(credentials.orElseGet(() -> defaultCredentials()));
this.clientOptions = clientOptions;
- this.httpOptions = defaultHttpOptions(/* vertexAI= */ true, this.location);
+ this.httpOptions =
+ defaultHttpOptions(
+ /* vertexAI= */ true,
+ this.location,
+ this.apiKey,
+ this.customBaseUrl,
+ initHttpOptionsBuilder.build().baseUrl());
if (customHttpOptions.isPresent()) {
this.httpOptions = mergeHttpOptions(customHttpOptions.get());
}
- this.apiKey = Optional.empty();
this.vertexAI = true;
- this.httpClient = createHttpClient(httpOptions.timeout(), clientOptions);
+ this.httpClient = createHttpClient(httpOptions, clientOptions);
}
- private String getApiKeyFromEnv() {
- String googleApiKey = System.getenv("GOOGLE_API_KEY");
- if (googleApiKey != null && googleApiKey.isEmpty()) {
- googleApiKey = null;
- }
- String geminiApiKey = System.getenv("GEMINI_API_KEY");
- if (geminiApiKey != null && geminiApiKey.isEmpty()) {
- geminiApiKey = null;
+ private OkHttpClient createHttpClient(
+ HttpOptions httpOptions, Optional<ClientOptions> clientOptions) {
+ OkHttpClient.Builder builder = new OkHttpClient.Builder();
+ // Remove timeouts by default (OkHttp has a default of 10 seconds)
+ builder.connectTimeout(Duration.ofMillis(0));
+ builder.readTimeout(Duration.ofMillis(0));
+ builder.writeTimeout(Duration.ofMillis(0));
+
+ httpOptions.timeout().ifPresent(timeout -> builder.callTimeout(Duration.ofMillis(timeout)));
+
+ HttpRetryOptions retryOptions =
+ httpOptions.retryOptions().orElse(HttpRetryOptions.builder().build());
+ builder.addInterceptor(new RetryInterceptor(retryOptions));
+
+ clientOptions.ifPresent(
+ options -> {
+ Dispatcher dispatcher = new Dispatcher();
+ options.maxConnections().ifPresent(dispatcher::setMaxRequests);
+ options.maxConnectionsPerHost().ifPresent(dispatcher::setMaxRequestsPerHost);
+ builder.dispatcher(dispatcher);
+ options
+ .proxyOptions()
+ .ifPresent(
+ proxyOptions -> {
+ applyProxyOptions(proxyOptions, builder);
+ });
+ });
+
+ return builder.build();
+ }
+
+ /** Applies the proxy options to the OkHttpClient builder. */
+ private void applyProxyOptions(ProxyOptions proxyOptions, OkHttpClient.Builder builder) {
+ final ProxyType proxyType = proxyOptions.type().orElse(new ProxyType("HTTP"));
+ final Proxy.Type type;
+
+ switch (proxyType.knownEnum()) {
+ case SOCKS:
+ type = Proxy.Type.SOCKS;
+ break;
+ case HTTP:
+ type = Proxy.Type.HTTP;
+ break;
+ case DIRECT:
+ builder.proxy(Proxy.NO_PROXY);
+ return;
+ default:
+ throw new IllegalArgumentException("Unsupported proxy type: " + proxyType);
}
- if (googleApiKey != null && geminiApiKey != null) {
- logger.warning(
- "Both GOOGLE_API_KEY and GEMINI_API_KEY are set. Using GOOGLE_API_KEY.");
+ // Set the proxy for non-direct types.
+ String host =
+ proxyOptions
+ .host()
+ .orElseThrow(
+ () -> new IllegalArgumentException("Proxy host is required in the ProxyOptions."));
+ int port =
+ proxyOptions
+ .port()
+ .orElseThrow(
+ () -> new IllegalArgumentException("Proxy port is required in the ProxyOptions."));
+
+ builder.proxy(new Proxy(type, new InetSocketAddress(host, port)));
+
+ // Set the proxy authenticator if username and password are provided.
+ boolean userPresent = proxyOptions.username().isPresent();
+ boolean passPresent = proxyOptions.password().isPresent();
+
+ if (userPresent != passPresent) {
+ throw new IllegalArgumentException(
+ "Proxy username and password must both be provided or not at all.");
}
- if (googleApiKey != null) {
- return googleApiKey;
+ if (userPresent && passPresent) {
+ final String credential =
+ Credentials.basic(proxyOptions.username().get(), proxyOptions.password().get());
+ builder.proxyAuthenticator(
+ (route, response) -> {
+ if (response.request().header("Proxy-Authorization") != null) {
+ return null;
+ }
+ return response
+ .request()
+ .newBuilder()
+ .header("Proxy-Authorization", credential)
+ .build();
+ });
}
- return geminiApiKey;
}
- private CloseableHttpClient createHttpClient(
- Optional timeout, Optional clientOptions) {
- HttpClientBuilder builder = HttpClients.custom();
+ /** Builds an HTTP request given the http method, path, and request json string. */
+ @SuppressWarnings("unchecked")
+ protected Request buildRequest(
+ String httpMethod,
+ String path,
+ String requestJson,
+ Optional<HttpOptions> requestHttpOptions) {
+ String capitalizedHttpMethod = Ascii.toUpperCase(httpMethod);
+ HttpOptions mergedHttpOptions = mergeHttpOptions(requestHttpOptions.orElse(null));
+
+ boolean prependProjectLocation =
+ shouldPrependVertexProjectPath(capitalizedHttpMethod, path, mergedHttpOptions);
+
+ if (prependProjectLocation) {
+ path =
+ String.format("projects/%s/locations/%s/", this.project.get(), this.location.get())
+ + path;
+ }
- RequestConfig.Builder requestConfigBuilder = RequestConfig.custom();
- timeout.ifPresent(connectTimeout -> requestConfigBuilder.setConnectTimeout(connectTimeout));
- builder.setDefaultRequestConfig(requestConfigBuilder.build());
+ String requestUrl;
- PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
- clientOptions.ifPresent(
- options -> {
- options.maxConnections().ifPresent(connectionManager::setMaxTotal);
- options.maxConnectionsPerHost().ifPresent(connectionManager::setDefaultMaxPerRoute);
+ String baseUrl =
+ mergedHttpOptions
+ .baseUrl()
+ .orElseThrow(() -> new IllegalStateException("baseUrl is required."));
+ if (baseUrl.endsWith("/")) {
+ // Sometimes users set the base URL with a trailing slash, so we need to remove it.
+ baseUrl = baseUrl.substring(0, baseUrl.length() - 1);
+ }
+ String apiVersion =
+ mergedHttpOptions
+ .apiVersion()
+ .orElseThrow(() -> new IllegalStateException("apiVersion is required."));
+
+ if (apiVersion.isEmpty()) {
+ requestUrl = String.format("%s/%s", baseUrl, path);
+ } else {
+ requestUrl = String.format("%s/%s/%s", baseUrl, apiVersion, path);
+ }
+
+ if (!VALID_HTTP_METHODS.contains(capitalizedHttpMethod)) {
+ throw new IllegalArgumentException("Unsupported HTTP method: " + capitalizedHttpMethod);
+ }
+
+ ObjectMapper objectMapper = new ObjectMapper();
+ RequestBody body;
+ if (METHODS_WITH_BODY.contains(capitalizedHttpMethod)) {
+ body = RequestBody.create(requestJson, MediaType.parse("application/json"));
+ } else {
+ body = null;
+ }
+
+ if (mergedHttpOptions.extraBody().isPresent() && body != null) {
+ try {
+ Map<String, Object> requestBodyMap = objectMapper.readValue(requestJson, Map.class);
+ mergeMaps(requestBodyMap, mergedHttpOptions.extraBody().get());
+ requestJson = objectMapper.writeValueAsString(requestBodyMap);
+ body = RequestBody.create(requestJson, MediaType.parse("application/json"));
+ } catch (JsonProcessingException e) {
+ logger.warning("Failed to merge extraBody into request body: " + e.getMessage());
+ // If merging fails, proceed with the original request body
+ body = RequestBody.create(requestJson, MediaType.parse("application/json"));
+ }
+ } else if (mergedHttpOptions.extraBody().isPresent()) {
+ logger.warning(
+ "HttpOptions.extraBody is set, but the HTTP method does not support a request body. "
+ + "The extraBody will be ignored.");
+ }
+
+ Request.Builder requestBuilder =
+ new Request.Builder().url(requestUrl).method(capitalizedHttpMethod, body);
+
+ requestHttpOptions.ifPresent(
+ httpOptions -> {
+ requestBuilder.tag(HttpOptions.class, httpOptions);
+ if (httpOptions.retryOptions().isPresent()) {
+ requestBuilder.tag(HttpRetryOptions.class, mergedHttpOptions.retryOptions().get());
+ }
});
- builder.setConnectionManager(connectionManager);
- return builder.build();
+ setHeaders(requestBuilder, mergedHttpOptions);
+ return requestBuilder.build();
+ }
+
+ /** Builds an HTTP request given the http method, url, and request bytes. */
+ protected Request buildRequest(
+ String httpMethod,
+ String url,
+ byte[] requestBytes,
+ Optional<HttpOptions> requestHttpOptions) {
+ HttpOptions mergedHttpOptions = mergeHttpOptions(requestHttpOptions.orElse(null));
+
+ String finalUrl = url;
+ if (mergedHttpOptions.baseUrl().isPresent()) {
+ try {
+ URI originalUri = new URI(url);
+ URI baseUri = new URI(mergedHttpOptions.baseUrl().get());
+
+ String baseUriPath = baseUri.getRawPath() != null ? baseUri.getRawPath() : "";
+ String originalUriPath = originalUri.getRawPath() != null ? originalUri.getRawPath() : "";
+
+ String combinedPath;
+ if (baseUriPath.endsWith("/") && originalUriPath.startsWith("/")) {
+ combinedPath = baseUriPath + originalUriPath.substring(1);
+ } else {
+ combinedPath = baseUriPath + originalUriPath;
+ }
+
+ finalUrl =
+ new URI(
+ baseUri.getScheme(),
+ baseUri.getRawAuthority(),
+ combinedPath,
+ originalUri.getRawQuery(),
+ originalUri.getRawFragment())
+ .toString();
+ } catch (URISyntaxException e) {
+ logger.warning("Failed to rewrite upload URL with base URL: " + e.getMessage());
+ }
+ }
+
+ if (httpMethod.equalsIgnoreCase("POST")) {
+ RequestBody body =
+ RequestBody.create(requestBytes, MediaType.get("application/octet-stream"));
+ Request.Builder requestBuilder = new Request.Builder().url(finalUrl).post(body);
+ requestHttpOptions.ifPresent(
+ httpOptions -> {
+ if (httpOptions.retryOptions().isPresent()) {
+ requestBuilder.tag(HttpRetryOptions.class, mergedHttpOptions.retryOptions().get());
+ }
+ });
+ setHeaders(requestBuilder, mergedHttpOptions);
+ return requestBuilder.build();
+ } else {
+ throw new IllegalArgumentException(
+ "The request method with bytes is only supported for POST. Unsupported HTTP method: "
+ + httpMethod);
+ }
+ }
+
+ /** Sets the required headers (including auth) on the request object. */
+ private void setHeaders(Request.Builder request, HttpOptions requestHttpOptions) {
+ for (Map.Entry<String, String> header :
+ requestHttpOptions.headers().orElse(ImmutableMap.of()).entrySet()) {
+ request.header(header.getKey(), header.getValue());
+ }
+
+ if (apiKey.isPresent()) {
+ // Sets API key for Gemini Developer API or Vertex AI Express mode
+ request.header("x-goog-api-key", apiKey.get());
+ } else if (credentials.isPresent()) {
+ GoogleCredentials cred = credentials.get();
+ try {
+ cred.refreshIfExpired();
+ } catch (IOException e) {
+ throw new GenAiIOException("Failed to refresh credentials.", e);
+ }
+ String accessToken = cred.getAccessToken().getTokenValue();
+ request.header("Authorization", "Bearer " + accessToken);
+
+ if (cred.getQuotaProjectId() != null) {
+ request.header("x-goog-user-project", cred.getQuotaProjectId());
+ }
+ } else if (!customBaseUrl.isPresent()) {
+ throw new IllegalStateException("credentials is required");
+ }
}
/** Sends a Http request given the http method, path, and request json string. */
@@ -185,6 +540,20 @@ public abstract ApiResponse request(
public abstract ApiResponse request(
String httpMethod, String path, byte[] requestBytes, Optional<HttpOptions> httpOptions);
+ /**
+ * Sends an asynchronous Http request given the http method, path, request json string, and http
+ * options.
+ */
+ public abstract CompletableFuture<ApiResponse> asyncRequest(
+ String httpMethod, String path, String requestJson, Optional<HttpOptions> httpOptions);
+
+ /**
+ * Sends an asynchronous Http request given the http method, path, request bytes, and http
+ * options.
+ */
+ public abstract CompletableFuture<ApiResponse> asyncRequest(
+ String httpMethod, String path, byte[] requestBytes, Optional<HttpOptions> httpOptions);
+
/** Returns the library version. */
static String libraryVersion() {
// TODO: Automate revisions to the SDK library version.
@@ -213,11 +582,67 @@ public boolean vertexAI() {
return apiKey.orElse(null);
}
+ /** Returns the custom base URL if provided. */
+ public @Nullable String customBaseUrl() {
+ return customBaseUrl.orElse(null);
+ }
+
/** Returns the HttpClient for API calls. */
- CloseableHttpClient httpClient() {
+ public OkHttpClient httpClient() {
return httpClient;
}
+ /** Returns the GoogleCredentials for Vertex AI APIs. */
+ public @Nullable GoogleCredentials credentials() {
+ return credentials.orElse(null);
+ }
+
+ /** Returns the HTTP options for API calls. */
+ public HttpOptions httpOptions() {
+ return httpOptions;
+ }
+
+ /**
+ * Merges two maps recursively. If a key exists in both maps, the value from `source` overwrites
+ * the value in `target`. If the value is a list, then update the whole list. A warning is logged
+ * if the types of the values for the same key are different.
+ *
+ * @param target The target map to merge into.
+ * @param source The source map to merge from.
+ */
+ @SuppressWarnings("unchecked")
+ private void mergeMaps(Map<String, Object> target, Map<String, Object> source) {
+ for (Map.Entry<String, Object> entry : source.entrySet()) {
+ String key = entry.getKey();
+ Object sourceValue = entry.getValue();
+
+ if (target.containsKey(key)) {
+ Object targetValue = target.get(key);
+
+ if (targetValue instanceof Map && sourceValue instanceof Map) {
+ // Both values are maps, recursively merge them
+ mergeMaps((Map<String, Object>) targetValue, (Map<String, Object>) sourceValue);
+ } else if (targetValue instanceof List && sourceValue instanceof List) {
+ // Both values are lists, replace the target list with the source list
+ target.put(key, sourceValue);
+ } else {
+ // Values are not both maps or both lists, check if they have the same type
+ if (targetValue.getClass() != sourceValue.getClass()) {
+ logger.warning(
+ String.format(
+ "Type mismatch for key '%s'. Original type: %s, new type: %s. Overwriting"
+ + " with the new value.",
+ key, targetValue.getClass().getName(), sourceValue.getClass().getName()));
+ }
+ target.put(key, sourceValue);
+ }
+ } else {
+ // Key does not exist in target, add it
+ target.put(key, sourceValue);
+ }
+ }
+ }
+
private Optional