From 0f259d9a460884e0061f1f96ffeaefa8c6de3963 Mon Sep 17 00:00:00 2001 From: "octavia-bot[bot]" <108746235+octavia-bot[bot]@users.noreply.github.com> Date: Fri, 6 Feb 2026 06:24:14 +0000 Subject: [PATCH 01/16] docs: sync agent connector docs from airbyte-agent-connectors repo (#72918) Co-authored-by: ian-at-airbyte <187576150+ian-at-airbyte@users.noreply.github.com> --- docs/ai-agents/connectors/airtable/README.md | 4 ++-- docs/ai-agents/connectors/amazon-ads/README.md | 4 ++-- docs/ai-agents/connectors/asana/README.md | 4 ++-- docs/ai-agents/connectors/facebook-marketing/README.md | 4 ++-- docs/ai-agents/connectors/github/README.md | 4 ++-- docs/ai-agents/connectors/gong/README.md | 4 ++-- docs/ai-agents/connectors/google-drive/README.md | 4 ++-- docs/ai-agents/connectors/greenhouse/README.md | 4 ++-- docs/ai-agents/connectors/hubspot/README.md | 4 ++-- docs/ai-agents/connectors/intercom/README.md | 4 ++-- docs/ai-agents/connectors/jira/README.md | 4 ++-- docs/ai-agents/connectors/klaviyo/README.md | 4 ++-- docs/ai-agents/connectors/linear/README.md | 4 ++-- docs/ai-agents/connectors/mailchimp/README.md | 4 ++-- docs/ai-agents/connectors/orb/README.md | 4 ++-- docs/ai-agents/connectors/salesforce/README.md | 4 ++-- docs/ai-agents/connectors/shopify/README.md | 4 ++-- docs/ai-agents/connectors/slack/README.md | 4 ++-- docs/ai-agents/connectors/stripe/README.md | 4 ++-- docs/ai-agents/connectors/zendesk-chat/README.md | 4 ++-- docs/ai-agents/connectors/zendesk-support/README.md | 4 ++-- 21 files changed, 42 insertions(+), 42 deletions(-) diff --git a/docs/ai-agents/connectors/airtable/README.md b/docs/ai-agents/connectors/airtable/README.md index e2837442cf8e..30ed7c2de829 100644 --- a/docs/ai-agents/connectors/airtable/README.md +++ b/docs/ai-agents/connectors/airtable/README.md @@ -105,7 +105,7 @@ See the official [Airtable API reference](https://airtable.com/developers/web/ap ## Version information -- **Package version:** 0.1.21 +- **Package version:** 0.1.22 - **Connector version:** 1.0.3 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/airtable/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/amazon-ads/README.md b/docs/ai-agents/connectors/amazon-ads/README.md index bebbc98ee3c6..fcbeb2ff8644 100644 --- a/docs/ai-agents/connectors/amazon-ads/README.md +++ b/docs/ai-agents/connectors/amazon-ads/README.md @@ -104,7 +104,7 @@ See the official [Amazon-Ads API reference](https://advertising.amazon.com/API/d ## Version information -- **Package version:** 0.1.42 +- **Package version:** 0.1.43 - **Connector version:** 1.0.7 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/amazon-ads/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/asana/README.md b/docs/ai-agents/connectors/asana/README.md index 956ac5bc59aa..737bfb857130 100644 --- a/docs/ai-agents/connectors/asana/README.md +++ b/docs/ai-agents/connectors/asana/README.md @@ -126,7 +126,7 @@ See the official [Asana API reference](https://developers.asana.com/reference/re ## Version information -- **Package 
version:** 0.19.94 +- **Package version:** 0.19.95 - **Connector version:** 0.1.12 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/asana/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/facebook-marketing/README.md b/docs/ai-agents/connectors/facebook-marketing/README.md index 406e7e8f1578..b62a195b24c2 100644 --- a/docs/ai-agents/connectors/facebook-marketing/README.md +++ b/docs/ai-agents/connectors/facebook-marketing/README.md @@ -114,7 +114,7 @@ See the official [Facebook-Marketing API reference](https://developers.facebook. ## Version information -- **Package version:** 0.1.27 +- **Package version:** 0.1.28 - **Connector version:** 1.0.14 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/facebook-marketing/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/github/README.md b/docs/ai-agents/connectors/github/README.md index a684a06f6def..1e79ce488b44 100644 --- a/docs/ai-agents/connectors/github/README.md +++ b/docs/ai-agents/connectors/github/README.md @@ -125,7 +125,7 @@ See the official [Github API reference](https://docs.github.com/en/rest). ## Version information -- **Package version:** 0.18.94 +- **Package version:** 0.18.95 - **Connector version:** 0.1.11 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/github/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/gong/README.md b/docs/ai-agents/connectors/gong/README.md index 75d309b52be2..1fb964804133 100644 --- a/docs/ai-agents/connectors/gong/README.md +++ b/docs/ai-agents/connectors/gong/README.md @@ -123,7 +123,7 @@ See the official [Gong API reference](https://gong.app.gong.io/settings/api/docu ## Version information -- **Package version:** 0.19.99 +- **Package version:** 0.19.100 - **Connector version:** 0.1.17 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/gong/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/google-drive/README.md b/docs/ai-agents/connectors/google-drive/README.md index 3439fc2c3c6b..ef962c4b5e58 100644 --- a/docs/ai-agents/connectors/google-drive/README.md +++ b/docs/ai-agents/connectors/google-drive/README.md @@ -124,7 +124,7 @@ See the official [Google-Drive API reference](https://developers.google.com/work ## Version information -- **Package version:** 0.1.62 +- **Package version:** 0.1.63 - **Connector version:** 0.1.6 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View 
changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/google-drive/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/greenhouse/README.md b/docs/ai-agents/connectors/greenhouse/README.md index f373cb8170d1..dd7440113abd 100644 --- a/docs/ai-agents/connectors/greenhouse/README.md +++ b/docs/ai-agents/connectors/greenhouse/README.md @@ -118,7 +118,7 @@ See the official [Greenhouse API reference](https://developers.greenhouse.io/har ## Version information -- **Package version:** 0.17.88 +- **Package version:** 0.17.89 - **Connector version:** 0.1.5 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/greenhouse/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/hubspot/README.md b/docs/ai-agents/connectors/hubspot/README.md index 8fbda44404e5..4d9e7c302f9e 100644 --- a/docs/ai-agents/connectors/hubspot/README.md +++ b/docs/ai-agents/connectors/hubspot/README.md @@ -113,7 +113,7 @@ See the official [Hubspot API reference](https://developers.hubspot.com/docs/api ## Version information -- **Package version:** 0.15.94 +- **Package version:** 0.15.95 - **Connector version:** 0.1.11 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/hubspot/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/intercom/README.md b/docs/ai-agents/connectors/intercom/README.md index 3ef755546829..ece0da70a3e1 100644 --- a/docs/ai-agents/connectors/intercom/README.md +++ b/docs/ai-agents/connectors/intercom/README.md @@ -114,7 +114,7 @@ See the official [Intercom API reference](https://developers.intercom.com/docs/r ## Version information -- **Package version:** 0.1.63 +- **Package version:** 0.1.64 - **Connector version:** 0.1.7 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/intercom/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/jira/README.md b/docs/ai-agents/connectors/jira/README.md index 91cfbb8dc07a..8656d3487f50 100644 --- a/docs/ai-agents/connectors/jira/README.md +++ b/docs/ai-agents/connectors/jira/README.md @@ -117,7 +117,7 @@ See the official [Jira API reference](https://developer.atlassian.com/cloud/jira ## Version information -- **Package version:** 0.1.84 +- **Package version:** 0.1.85 - **Connector version:** 1.1.5 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/jira/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/klaviyo/README.md b/docs/ai-agents/connectors/klaviyo/README.md index 65e128dc2226..69230f26dc00 100644 --- a/docs/ai-agents/connectors/klaviyo/README.md +++ 
b/docs/ai-agents/connectors/klaviyo/README.md @@ -115,7 +115,7 @@ See the official [Klaviyo API reference](https://developers.klaviyo.com/en/refer ## Version information -- **Package version:** 0.1.20 +- **Package version:** 0.1.21 - **Connector version:** 1.0.1 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/klaviyo/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/linear/README.md b/docs/ai-agents/connectors/linear/README.md index 2930f14e653d..19bb5f9c4631 100644 --- a/docs/ai-agents/connectors/linear/README.md +++ b/docs/ai-agents/connectors/linear/README.md @@ -117,7 +117,7 @@ See the official [Linear API reference](https://linear.app/developers/graphql). ## Version information -- **Package version:** 0.19.90 +- **Package version:** 0.19.91 - **Connector version:** 0.1.9 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/linear/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/mailchimp/README.md b/docs/ai-agents/connectors/mailchimp/README.md index 0182659ef0d2..3cdce33ee2cb 100644 --- a/docs/ai-agents/connectors/mailchimp/README.md +++ b/docs/ai-agents/connectors/mailchimp/README.md @@ -119,7 +119,7 @@ See the official [Mailchimp API reference](https://mailchimp.com/developer/marke ## Version information -- **Package version:** 0.1.49 +- **Package version:** 0.1.50 - **Connector version:** 1.0.6 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/mailchimp/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/orb/README.md b/docs/ai-agents/connectors/orb/README.md index cf77d82ae4b4..908b7cc261f6 100644 --- a/docs/ai-agents/connectors/orb/README.md +++ b/docs/ai-agents/connectors/orb/README.md @@ -116,7 +116,7 @@ See the official [Orb API reference](https://docs.withorb.com/api-reference). 
## Version information -- **Package version:** 0.1.23 +- **Package version:** 0.1.24 - **Connector version:** 0.1.3 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/orb/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/salesforce/README.md b/docs/ai-agents/connectors/salesforce/README.md index bd510a4ce065..565d40bdb72f 100644 --- a/docs/ai-agents/connectors/salesforce/README.md +++ b/docs/ai-agents/connectors/salesforce/README.md @@ -120,7 +120,7 @@ See the official [Salesforce API reference](https://developer.salesforce.com/doc ## Version information -- **Package version:** 0.1.86 +- **Package version:** 0.1.87 - **Connector version:** 1.0.12 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/salesforce/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/shopify/README.md b/docs/ai-agents/connectors/shopify/README.md index 5a599ac281d8..4574d4e77eb7 100644 --- a/docs/ai-agents/connectors/shopify/README.md +++ b/docs/ai-agents/connectors/shopify/README.md @@ -139,7 +139,7 @@ See the official [Shopify API reference](https://shopify.dev/docs/api/admin-rest ## Version information -- **Package version:** 0.1.43 +- **Package version:** 0.1.44 - **Connector version:** 0.1.6 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/shopify/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/slack/README.md b/docs/ai-agents/connectors/slack/README.md index 1035e639b0a8..14f821bd92c3 100644 --- a/docs/ai-agents/connectors/slack/README.md +++ b/docs/ai-agents/connectors/slack/README.md @@ -128,7 +128,7 @@ See the official [Slack API reference](https://api.slack.com/methods). ## Version information -- **Package version:** 0.1.57 +- **Package version:** 0.1.58 - **Connector version:** 0.1.14 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/slack/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/stripe/README.md b/docs/ai-agents/connectors/stripe/README.md index 7a1415cfceb8..807e1f86b607 100644 --- a/docs/ai-agents/connectors/stripe/README.md +++ b/docs/ai-agents/connectors/stripe/README.md @@ -120,7 +120,7 @@ See the official [Stripe API reference](https://docs.stripe.com/api). 
## Version information -- **Package version:** 0.5.91 +- **Package version:** 0.5.92 - **Connector version:** 0.1.8 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/stripe/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/zendesk-chat/README.md b/docs/ai-agents/connectors/zendesk-chat/README.md index 94754a37db3c..344bf8582344 100644 --- a/docs/ai-agents/connectors/zendesk-chat/README.md +++ b/docs/ai-agents/connectors/zendesk-chat/README.md @@ -131,7 +131,7 @@ See the official [Zendesk-Chat API reference](https://developer.zendesk.com/api- ## Version information -- **Package version:** 0.1.43 +- **Package version:** 0.1.44 - **Connector version:** 0.1.7 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/zendesk-chat/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/zendesk-support/README.md b/docs/ai-agents/connectors/zendesk-support/README.md index 5821107fb2dd..e92d60d41c8e 100644 --- a/docs/ai-agents/connectors/zendesk-support/README.md +++ b/docs/ai-agents/connectors/zendesk-support/README.md @@ -126,7 +126,7 @@ See the official [Zendesk-Support API reference](https://developer.zendesk.com/a ## Version information -- **Package version:** 0.18.94 +- **Package version:** 0.18.95 - **Connector version:** 0.1.12 -- **Generated with Connector SDK commit SHA:** e4f3b9c8a8118bfaa9d57578c64868c91cb9b3a4 +- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/zendesk-support/CHANGELOG.md) \ No newline at end of file From 849354f0c23d2dccfb1f39c0eb0b408019e4810d Mon Sep 17 00:00:00 2001 From: "octavia-bot[bot]" <108746235+octavia-bot[bot]@users.noreply.github.com> Date: Fri, 6 Feb 2026 18:21:05 +0000 Subject: [PATCH 02/16] docs: sync agent connector docs from airbyte-agent-connectors repo (#72933) Co-authored-by: ian-at-airbyte <187576150+ian-at-airbyte@users.noreply.github.com> --- docs/ai-agents/connectors/airtable/README.md | 6 +++--- docs/ai-agents/connectors/amazon-ads/README.md | 6 +++--- docs/ai-agents/connectors/asana/README.md | 6 +++--- docs/ai-agents/connectors/facebook-marketing/README.md | 6 +++--- docs/ai-agents/connectors/github/README.md | 6 +++--- docs/ai-agents/connectors/gong/README.md | 6 +++--- docs/ai-agents/connectors/google-drive/README.md | 6 +++--- docs/ai-agents/connectors/greenhouse/README.md | 6 +++--- docs/ai-agents/connectors/hubspot/README.md | 6 +++--- docs/ai-agents/connectors/intercom/README.md | 6 +++--- docs/ai-agents/connectors/jira/README.md | 6 +++--- docs/ai-agents/connectors/klaviyo/README.md | 6 +++--- docs/ai-agents/connectors/linear/README.md | 6 +++--- docs/ai-agents/connectors/mailchimp/README.md | 6 +++--- docs/ai-agents/connectors/orb/README.md | 6 +++--- docs/ai-agents/connectors/salesforce/README.md | 6 +++--- docs/ai-agents/connectors/shopify/README.md | 6 +++--- docs/ai-agents/connectors/slack/README.md | 6 +++--- docs/ai-agents/connectors/stripe/README.md | 6 +++--- 
docs/ai-agents/connectors/zendesk-chat/README.md | 6 +++--- docs/ai-agents/connectors/zendesk-support/README.md | 6 +++--- 21 files changed, 63 insertions(+), 63 deletions(-) diff --git a/docs/ai-agents/connectors/airtable/README.md b/docs/ai-agents/connectors/airtable/README.md index 30ed7c2de829..f6c36055c4be 100644 --- a/docs/ai-agents/connectors/airtable/README.md +++ b/docs/ai-agents/connectors/airtable/README.md @@ -105,7 +105,7 @@ See the official [Airtable API reference](https://airtable.com/developers/web/ap ## Version information -- **Package version:** 0.1.22 -- **Connector version:** 1.0.3 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.23 +- **Connector version:** 1.0.4 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/airtable/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/amazon-ads/README.md b/docs/ai-agents/connectors/amazon-ads/README.md index fcbeb2ff8644..d70d6bfaa2f8 100644 --- a/docs/ai-agents/connectors/amazon-ads/README.md +++ b/docs/ai-agents/connectors/amazon-ads/README.md @@ -104,7 +104,7 @@ See the official [Amazon-Ads API reference](https://advertising.amazon.com/API/d ## Version information -- **Package version:** 0.1.43 -- **Connector version:** 1.0.7 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.44 +- **Connector version:** 1.0.8 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/amazon-ads/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/asana/README.md b/docs/ai-agents/connectors/asana/README.md index 737bfb857130..6935fe91fcb6 100644 --- a/docs/ai-agents/connectors/asana/README.md +++ b/docs/ai-agents/connectors/asana/README.md @@ -126,7 +126,7 @@ See the official [Asana API reference](https://developers.asana.com/reference/re ## Version information -- **Package version:** 0.19.95 -- **Connector version:** 0.1.12 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.19.96 +- **Connector version:** 0.1.13 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/asana/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/facebook-marketing/README.md b/docs/ai-agents/connectors/facebook-marketing/README.md index b62a195b24c2..cd3bc06a180a 100644 --- a/docs/ai-agents/connectors/facebook-marketing/README.md +++ b/docs/ai-agents/connectors/facebook-marketing/README.md @@ -114,7 +114,7 @@ See the official [Facebook-Marketing API reference](https://developers.facebook. 
## Version information -- **Package version:** 0.1.28 -- **Connector version:** 1.0.14 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.29 +- **Connector version:** 1.0.15 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/facebook-marketing/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/github/README.md b/docs/ai-agents/connectors/github/README.md index 1e79ce488b44..ddc9a3f2500c 100644 --- a/docs/ai-agents/connectors/github/README.md +++ b/docs/ai-agents/connectors/github/README.md @@ -125,7 +125,7 @@ See the official [Github API reference](https://docs.github.com/en/rest). ## Version information -- **Package version:** 0.18.95 -- **Connector version:** 0.1.11 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.18.96 +- **Connector version:** 0.1.12 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/github/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/gong/README.md b/docs/ai-agents/connectors/gong/README.md index 1fb964804133..7bb6ea27d756 100644 --- a/docs/ai-agents/connectors/gong/README.md +++ b/docs/ai-agents/connectors/gong/README.md @@ -123,7 +123,7 @@ See the official [Gong API reference](https://gong.app.gong.io/settings/api/docu ## Version information -- **Package version:** 0.19.100 -- **Connector version:** 0.1.17 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.19.101 +- **Connector version:** 0.1.18 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/gong/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/google-drive/README.md b/docs/ai-agents/connectors/google-drive/README.md index ef962c4b5e58..4b380b6771ae 100644 --- a/docs/ai-agents/connectors/google-drive/README.md +++ b/docs/ai-agents/connectors/google-drive/README.md @@ -124,7 +124,7 @@ See the official [Google-Drive API reference](https://developers.google.com/work ## Version information -- **Package version:** 0.1.63 -- **Connector version:** 0.1.6 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.64 +- **Connector version:** 0.1.7 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/google-drive/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/greenhouse/README.md b/docs/ai-agents/connectors/greenhouse/README.md index dd7440113abd..0f240de74de2 100644 --- a/docs/ai-agents/connectors/greenhouse/README.md +++ b/docs/ai-agents/connectors/greenhouse/README.md @@ -118,7 +118,7 @@ See the official [Greenhouse API reference](https://developers.greenhouse.io/har ## Version information -- **Package version:** 0.17.89 -- **Connector version:** 0.1.5 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 
0.17.90 +- **Connector version:** 0.1.6 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/greenhouse/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/hubspot/README.md b/docs/ai-agents/connectors/hubspot/README.md index 4d9e7c302f9e..5bc224523dfc 100644 --- a/docs/ai-agents/connectors/hubspot/README.md +++ b/docs/ai-agents/connectors/hubspot/README.md @@ -113,7 +113,7 @@ See the official [Hubspot API reference](https://developers.hubspot.com/docs/api ## Version information -- **Package version:** 0.15.95 -- **Connector version:** 0.1.11 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.15.96 +- **Connector version:** 0.1.12 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/hubspot/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/intercom/README.md b/docs/ai-agents/connectors/intercom/README.md index ece0da70a3e1..f563ae27013c 100644 --- a/docs/ai-agents/connectors/intercom/README.md +++ b/docs/ai-agents/connectors/intercom/README.md @@ -114,7 +114,7 @@ See the official [Intercom API reference](https://developers.intercom.com/docs/r ## Version information -- **Package version:** 0.1.64 -- **Connector version:** 0.1.7 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.65 +- **Connector version:** 0.1.8 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/intercom/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/jira/README.md b/docs/ai-agents/connectors/jira/README.md index 8656d3487f50..a3919bf7b809 100644 --- a/docs/ai-agents/connectors/jira/README.md +++ b/docs/ai-agents/connectors/jira/README.md @@ -117,7 +117,7 @@ See the official [Jira API reference](https://developer.atlassian.com/cloud/jira ## Version information -- **Package version:** 0.1.85 -- **Connector version:** 1.1.5 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.86 +- **Connector version:** 1.1.6 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/jira/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/klaviyo/README.md b/docs/ai-agents/connectors/klaviyo/README.md index 69230f26dc00..0f045eaff619 100644 --- a/docs/ai-agents/connectors/klaviyo/README.md +++ b/docs/ai-agents/connectors/klaviyo/README.md @@ -115,7 +115,7 @@ See the official [Klaviyo API reference](https://developers.klaviyo.com/en/refer ## Version information -- **Package version:** 0.1.21 -- **Connector version:** 1.0.1 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.22 +- **Connector version:** 1.0.2 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View 
changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/klaviyo/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/linear/README.md b/docs/ai-agents/connectors/linear/README.md index 19bb5f9c4631..b93deb50b351 100644 --- a/docs/ai-agents/connectors/linear/README.md +++ b/docs/ai-agents/connectors/linear/README.md @@ -117,7 +117,7 @@ See the official [Linear API reference](https://linear.app/developers/graphql). ## Version information -- **Package version:** 0.19.91 -- **Connector version:** 0.1.9 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.19.92 +- **Connector version:** 0.1.10 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/linear/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/mailchimp/README.md b/docs/ai-agents/connectors/mailchimp/README.md index 3cdce33ee2cb..564c8cabcb95 100644 --- a/docs/ai-agents/connectors/mailchimp/README.md +++ b/docs/ai-agents/connectors/mailchimp/README.md @@ -119,7 +119,7 @@ See the official [Mailchimp API reference](https://mailchimp.com/developer/marke ## Version information -- **Package version:** 0.1.50 -- **Connector version:** 1.0.6 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.51 +- **Connector version:** 1.0.7 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/mailchimp/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/orb/README.md b/docs/ai-agents/connectors/orb/README.md index 908b7cc261f6..3c37959deaec 100644 --- a/docs/ai-agents/connectors/orb/README.md +++ b/docs/ai-agents/connectors/orb/README.md @@ -116,7 +116,7 @@ See the official [Orb API reference](https://docs.withorb.com/api-reference). 
## Version information -- **Package version:** 0.1.24 -- **Connector version:** 0.1.3 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.25 +- **Connector version:** 0.1.4 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/orb/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/salesforce/README.md b/docs/ai-agents/connectors/salesforce/README.md index 565d40bdb72f..58a9a022ba8e 100644 --- a/docs/ai-agents/connectors/salesforce/README.md +++ b/docs/ai-agents/connectors/salesforce/README.md @@ -120,7 +120,7 @@ See the official [Salesforce API reference](https://developer.salesforce.com/doc ## Version information -- **Package version:** 0.1.87 -- **Connector version:** 1.0.12 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.88 +- **Connector version:** 1.0.13 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/salesforce/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/shopify/README.md b/docs/ai-agents/connectors/shopify/README.md index 4574d4e77eb7..645a9d7580d3 100644 --- a/docs/ai-agents/connectors/shopify/README.md +++ b/docs/ai-agents/connectors/shopify/README.md @@ -139,7 +139,7 @@ See the official [Shopify API reference](https://shopify.dev/docs/api/admin-rest ## Version information -- **Package version:** 0.1.44 -- **Connector version:** 0.1.6 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.45 +- **Connector version:** 0.1.7 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/shopify/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/slack/README.md b/docs/ai-agents/connectors/slack/README.md index 14f821bd92c3..03f0a0234adf 100644 --- a/docs/ai-agents/connectors/slack/README.md +++ b/docs/ai-agents/connectors/slack/README.md @@ -128,7 +128,7 @@ See the official [Slack API reference](https://api.slack.com/methods). ## Version information -- **Package version:** 0.1.58 -- **Connector version:** 0.1.14 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.59 +- **Connector version:** 0.1.15 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/slack/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/stripe/README.md b/docs/ai-agents/connectors/stripe/README.md index 807e1f86b607..676999e3e0a0 100644 --- a/docs/ai-agents/connectors/stripe/README.md +++ b/docs/ai-agents/connectors/stripe/README.md @@ -120,7 +120,7 @@ See the official [Stripe API reference](https://docs.stripe.com/api). 
## Version information -- **Package version:** 0.5.92 -- **Connector version:** 0.1.8 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.5.93 +- **Connector version:** 0.1.9 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/stripe/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/zendesk-chat/README.md b/docs/ai-agents/connectors/zendesk-chat/README.md index 344bf8582344..03b926b00f24 100644 --- a/docs/ai-agents/connectors/zendesk-chat/README.md +++ b/docs/ai-agents/connectors/zendesk-chat/README.md @@ -131,7 +131,7 @@ See the official [Zendesk-Chat API reference](https://developer.zendesk.com/api- ## Version information -- **Package version:** 0.1.44 -- **Connector version:** 0.1.7 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.1.45 +- **Connector version:** 0.1.8 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/zendesk-chat/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/zendesk-support/README.md b/docs/ai-agents/connectors/zendesk-support/README.md index e92d60d41c8e..f3d47880e7ec 100644 --- a/docs/ai-agents/connectors/zendesk-support/README.md +++ b/docs/ai-agents/connectors/zendesk-support/README.md @@ -126,7 +126,7 @@ See the official [Zendesk-Support API reference](https://developer.zendesk.com/a ## Version information -- **Package version:** 0.18.95 -- **Connector version:** 0.1.12 -- **Generated with Connector SDK commit SHA:** 883f64f29a8a65efcb5a7b62bf9fee14e94f4812 +- **Package version:** 0.18.96 +- **Connector version:** 0.1.13 +- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/zendesk-support/CHANGELOG.md) \ No newline at end of file From b8e2434f81ee25d152de3a723a6be96804bafa60 Mon Sep 17 00:00:00 2001 From: "devin-ai-integration[bot]" <158243242+devin-ai-integration[bot]@users.noreply.github.com> Date: Fri, 6 Feb 2026 11:49:28 -0800 Subject: [PATCH 03/16] docs(source-bing-ads): fix broken URL, grammar, and spelling in documentation (#72904) Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com> --- docs/integrations/sources/bing-ads.md | 40 +++++++++++++-------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/docs/integrations/sources/bing-ads.md b/docs/integrations/sources/bing-ads.md index 7b3814c5b9d2..ddaff90ffbc1 100644 --- a/docs/integrations/sources/bing-ads.md +++ b/docs/integrations/sources/bing-ads.md @@ -18,7 +18,7 @@ This page contains the setup guide and reference information for the [Bing Ads]( For Airbyte Open Source set up your application to get **Client ID**, **Client Secret**, **Refresh Token** 1. [Register your application](https://docs.microsoft.com/en-us/advertising/guides/authentication-oauth-register?view=bingads-13) in the Azure portal. -2. [Request user consent](https://docs.microsoft.com/en-us/advertising/guides/authentication-oauth-consent?view=bingads-13l) to get the authorization code. +2. 
[Request user consent](https://docs.microsoft.com/en-us/advertising/guides/authentication-oauth-consent?view=bingads-13) to get the authorization code. 3. Use the authorization code to [get a refresh token](https://docs.microsoft.com/en-us/advertising/guides/authentication-oauth-get-tokens?view=bingads-13). :::note @@ -57,14 +57,14 @@ The tenant is used in the authentication URL, for example: `https://login.micros 4. Enter a name for the Bing Ads connector. 5. For **Tenant ID**, enter the custom tenant or use the common tenant. 6. Add the developer token from [Step 1](#step-1-set-up-bing-ads). -7. For **Account Names Predicates** - see [predicates](https://learn.microsoft.com/en-us/advertising/customer-management-service/predicate?view=bingads-13) in bing ads docs. Will be used to filter your accounts by specified operator and account name. You can use multiple predicates pairs. The **Operator** is a one of Contains or Equals. The **Account Name** is a value to compare Accounts Name field in rows by specified operator. For example, for operator=Contains and name=Dev, all accounts where name contains dev will be replicated. And for operator=Equals and name=Airbyte, all accounts where name is equal to Airbyte will be replicated. Account Name value is not case-sensitive. +7. For **Account Names Predicates**, see [predicates](https://learn.microsoft.com/en-us/advertising/customer-management-service/predicate?view=bingads-13) in the Bing Ads docs. This filters your accounts by a specified operator and account name. You can use multiple predicate pairs. The **Operator** is one of Contains or Equals. The **Account Name** is a value to compare the Account Name field in rows by the specified operator. For example, for operator=Contains and name=Dev, all accounts where the name contains "Dev" will be replicated. For operator=Equals and name=Airbyte, all accounts where the name equals "Airbyte" will be replicated. Account Name values are not case-sensitive. 8. For **Reports Replication Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. If this field is blank, Airbyte will replicate all data from previous and current calendar years. -9. For **Lookback window** (also known as attribution or conversion window) enter the number of **days** to look into the past. If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. If you're not using performance report streams in incremental mode and Reports Start Date is not provided, let it with 0 default value. -10. For _Custom Reports_ - see [custom reports](#custom-reports) section, list of custom reports object: -11. For _Report Name_ enter the name that you want for your custom report. -12. For _Reporting Data Object_ add the Bing Ads Reporting Object that you want to sync in the custom report. -13. For _Columns_ add list columns of Reporting Data Object that you want to see in the custom report. -14. For _Aggregation_ add time aggregation. See [report aggregation](#report-aggregation) section. +9. For **Lookback window** (also known as attribution or conversion window) enter the number of **days** to look into the past. If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. If you're not using performance report streams in incremental mode and Reports Start Date is not provided, leave it at the default value of 0. +10. For _Custom Reports_, see the [custom reports](#custom-reports) section. +11. 
For _Report Name_, enter the name that you want for your custom report. +12. For _Reporting Data Object_, add the Bing Ads Reporting Object that you want to sync in the custom report. +13. For _Columns_, add the columns of the Reporting Data Object that you want to see in the custom report. +14. For _Aggregation_, add the time aggregation. See the [report aggregation](#report-aggregation) section. 15. (Optional) For _Disable Custom Report Names Camel to Snake Case Conversion_, enable this option if you want to use the exact report name without automatic conversion from camelCase to snake_case. See [custom report name conversion](#custom-report-name-conversion) for details. 16. Click **Authenticate your Bing Ads account**. 17. Log in and authorize the Bing Ads account. @@ -81,14 +81,14 @@ The tenant is used in the authentication URL, for example: `https://login.micros 4. Enter a name for the Bing Ads connector. 5. For **Tenant ID**, enter the custom tenant or use the common tenant. 6. Enter the **Client ID**, **Client Secret**, **Refresh Token**, and **Developer Token** from [Step 1](#step-1-set-up-bing-ads). -7. For **Account Names Predicates** - see [predicates](https://learn.microsoft.com/en-us/advertising/customer-management-service/predicate?view=bingads-13) in bing ads docs. Will be used to filter your accounts by specified operator and account name. You can use multiple predicates pairs. The **Operator** is a one of Contains or Equals. The **Account Name** is a value to compare Accounts Name field in rows by specified operator. For example, for operator=Contains and name=Dev, all accounts where name contains dev will be replicated. And for operator=Equals and name=Airbyte, all accounts where name is equal to Airbyte will be replicated. Account Name value is not case-sensitive. +7. For **Account Names Predicates**, see [predicates](https://learn.microsoft.com/en-us/advertising/customer-management-service/predicate?view=bingads-13) in the Bing Ads docs. This filters your accounts by a specified operator and account name. You can use multiple predicate pairs. The **Operator** is one of Contains or Equals. The **Account Name** is a value to compare the Account Name field in rows by the specified operator. For example, for operator=Contains and name=Dev, all accounts where the name contains "Dev" will be replicated. For operator=Equals and name=Airbyte, all accounts where the name equals "Airbyte" will be replicated. Account Name values are not case-sensitive. 8. For **Reports Replication Start Date**, enter the date in YYYY-MM-DD format. The data added on and after this date will be replicated. If this field is blank, Airbyte will replicate all data from previous and current calendar years. -9. For **Lookback window** (also known as attribution or conversion window) enter the number of **days** to look into the past. If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. If you're not using performance report streams in incremental mode and Reports Start Date is not provided, let it with 0 default value. -10. For _Custom Reports_ - see [custom reports](#custom-reports) section: -11. For _Report Name_ enter the name that you want for your custom report. -12. For _Reporting Data Object_ add the Bing Ads Reporting Object that you want to sync in the custom report. -13. For _Columns_ add columns of Reporting Data Object that you want to see in the custom report. -14. For _Aggregation_ select time aggregation. 
See [report aggregation](#report-aggregation) section. +9. For **Lookback window** (also known as attribution or conversion window) enter the number of **days** to look into the past. If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. If you're not using performance report streams in incremental mode and Reports Start Date is not provided, leave it at the default value of 0. +10. For _Custom Reports_, see the [custom reports](#custom-reports) section. +11. For _Report Name_, enter the name that you want for your custom report. +12. For _Reporting Data Object_, add the Bing Ads Reporting Object that you want to sync in the custom report. +13. For _Columns_, add the columns of the Reporting Data Object that you want to see in the custom report. +14. For _Aggregation_, select the time aggregation. See the [report aggregation](#report-aggregation) section. 15. (Optional) For _Disable Custom Report Names Camel to Snake Case Conversion_, enable this option if you want to use the exact report name without automatic conversion from camelCase to snake_case. See [custom report name conversion](#custom-report-name-conversion) for details. 16. Click **Set up source**. @@ -128,10 +128,10 @@ The Bing Ads source connector supports the following streams. For more informati :::note -Be careful with removing fields that you don't want to sync in the Replication Stream Settings. -Report will be generated by request with all fields in the Stream Schema. Removing fields from in the setting does not affect actual request for the report. -The results of such a report can be not accurate due to not visible values in removed fields. -If you faced this issue please use custom report, where you can define only that fields that you want to see in the report, and no other fields will be used in the request. +Be careful when removing fields that you don't want to sync in the replication stream settings. +Reports are generated with all fields in the stream schema. Removing fields in the replication settings does not change the actual API request for the report. +The results of such a report may be inaccurate due to invisible values in the removed fields. +If you encounter this issue, use a custom report where you can define only the fields that you want to see in the report. No other fields will be included in the request. ::: - [Account Performance Report Hourly](https://docs.microsoft.com/en-us/advertising/reporting-service/accountperformancereportrequest?view=bingads-13) @@ -308,7 +308,7 @@ The Bing Ads API limits the number of requests for all Microsoft Advertising cli ### Troubleshooting - Check out common troubleshooting issues for the Bing Ads source connector on our [Airbyte Forum](https://github.com/airbytehq/airbyte/discussions). -- Bulk streams (Ad Group Labels, App Install Ads, App Install Ad Labels, Campaign Labels, Keywords, Keyword Labels, Labels, Budgets) will ignore (fallback: none) the `LastSyncTimeInUTC` for dates > 30 days ago, which will trigger a full download request. +- Bulk streams (Ad Group Labels, App Install Ads, App Install Ad Labels, Campaign Labels, Keywords, Keyword Labels, Labels, Budget) ignore the `LastSyncTimeInUTC` for dates more than 30 days ago, which triggers a full download request. 
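
> **Illustrative note (not part of the patches above):** The Bing Ads setup steps changed in PATCH 03 describe the Account Names Predicates behavior: each predicate pairs an operator (`Contains` or `Equals`) with an account name, and matching is case-insensitive. The sketch below is a minimal illustration of that described behavior only — it is not the `source-bing-ads` implementation, and it assumes (since the docs don't say) that multiple predicates are OR-combined. The function name and dictionary shape are made up for this example.

```python
# Minimal sketch of the documented Account Names Predicates matching.
# Assumptions: predicates are OR-combined; each predicate is a dict with
# "operator" ("Contains" or "Equals") and "name"; matching is case-insensitive.

def account_matches(account_name: str, predicates: list[dict]) -> bool:
    """Return True if the account name satisfies any configured predicate."""
    candidate = account_name.lower()
    for predicate in predicates:
        target = predicate["name"].lower()
        if predicate["operator"] == "Contains" and target in candidate:
            return True
        if predicate["operator"] == "Equals" and target == candidate:
            return True
    return False

# Mirrors the examples in the docs: Contains/Dev matches any account whose
# name contains "dev"; Equals/Airbyte matches only an account named "Airbyte".
predicates = [
    {"operator": "Contains", "name": "Dev"},
    {"operator": "Equals", "name": "Airbyte"},
]
print(account_matches("Dev Sandbox", predicates))  # True
print(account_matches("airbyte", predicates))      # True
print(account_matches("Production", predicates))   # False
```
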
From f5bafe19384160fb3f43435eef924a8587268ff4 Mon Sep 17 00:00:00 2001 From: Ryan Waskewich <156025126+rwask@users.noreply.github.com> Date: Fri, 6 Feb 2026 16:03:45 -0500 Subject: [PATCH 04/16] fix(docs): add Definition ID to enterprise connector docs header (#72926) --- .../remark/enterpriseDocsHeaderInformation.js | 34 ++++++++++++++----- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/docusaurus/src/remark/enterpriseDocsHeaderInformation.js b/docusaurus/src/remark/enterpriseDocsHeaderInformation.js index 819021805e1a..615ebfd3a976 100644 --- a/docusaurus/src/remark/enterpriseDocsHeaderInformation.js +++ b/docusaurus/src/remark/enterpriseDocsHeaderInformation.js @@ -3,9 +3,15 @@ const { toAttributes } = require("../helpers/objects"); const visit = require("unist-util-visit").visit; const { fetchRegistry } = require("../scripts/fetch-registry"); -const getEnterpriseConnectorVersion = async (dockerRepository) => { +const FALLBACK_VERSION = "No version information available"; +const FALLBACK_DEFINITION_ID = "No definition ID available."; + +const getEnterpriseConnectorRegistryInfo = async (dockerRepository) => { if (!dockerRepository) { - return "No version information available"; + return { + version: FALLBACK_VERSION, + definitionId: FALLBACK_DEFINITION_ID, + }; } try { const registry = await fetchRegistry(); @@ -16,16 +22,24 @@ const getEnterpriseConnectorVersion = async (dockerRepository) => { r.dockerRepository_cloud === dockerRepository, ); if (!registryEntry) { - return "No version information available"; + return { + version: FALLBACK_VERSION, + definitionId: FALLBACK_DEFINITION_ID, + }; } - return ( - registryEntry.dockerImageTag_oss || registryEntry.dockerImageTag_cloud - ); + return { + version: + registryEntry.dockerImageTag_oss || registryEntry.dockerImageTag_cloud, + definitionId: registryEntry.definitionId || FALLBACK_DEFINITION_ID, + }; } catch (error) { console.warn(`[Enterprise Connector Debug] Error fetching version:`, error); } - return "No version information available"; + return { + version: FALLBACK_VERSION, + definitionId: FALLBACK_DEFINITION_ID, + }; }; const plugin = () => { @@ -33,7 +47,7 @@ const plugin = () => { const isDocsPage = isEnterpriseConnectorDocsPage(vfile); if (!isDocsPage) return; - const version = await getEnterpriseConnectorVersion( + const { version, definitionId } = await getEnterpriseConnectorRegistryInfo( vfile.data.frontMatter.dockerRepository, ); @@ -52,7 +66,9 @@ const plugin = () => { dockerImageTag: version, github_url: undefined, originalTitle, - "enterprise-connector": vfile.data.frontMatter["enterprise-connector"] || true, + "enterprise-connector": + vfile.data.frontMatter["enterprise-connector"] || true, + definitionId, }; firstHeading = false; From 329190523718aaa1f9e10bb141f72390f1ed6efa Mon Sep 17 00:00:00 2001 From: "octavia-bot[bot]" <108746235+octavia-bot[bot]@users.noreply.github.com> Date: Fri, 6 Feb 2026 22:07:46 +0000 Subject: [PATCH 05/16] docs: sync agent connector docs from airbyte-agent-connectors repo (#72934) Co-authored-by: ian-at-airbyte <187576150+ian-at-airbyte@users.noreply.github.com> --- docs/ai-agents/connectors/airtable/README.md | 8 +++--- .../ai-agents/connectors/amazon-ads/README.md | 6 ++-- docs/ai-agents/connectors/asana/README.md | 20 ++++++------- .../connectors/facebook-marketing/README.md | 24 ++++++++-------- docs/ai-agents/connectors/github/README.md | 4 +-- docs/ai-agents/connectors/gong/README.md | 14 +++++----- .../connectors/google-drive/README.md | 4 +-- 
.../ai-agents/connectors/greenhouse/README.md | 22 +++++++-------- docs/ai-agents/connectors/hubspot/README.md | 10 +++---- docs/ai-agents/connectors/intercom/README.md | 12 ++++---- docs/ai-agents/connectors/jira/README.md | 16 +++++------ docs/ai-agents/connectors/klaviyo/README.md | 18 ++++++------ docs/ai-agents/connectors/linear/README.md | 14 +++++----- docs/ai-agents/connectors/mailchimp/README.md | 12 ++++---- docs/ai-agents/connectors/orb/README.md | 12 ++++---- .../ai-agents/connectors/salesforce/README.md | 14 +++++----- docs/ai-agents/connectors/shopify/README.md | 4 +-- docs/ai-agents/connectors/slack/README.md | 8 +++--- docs/ai-agents/connectors/stripe/README.md | 14 +++++----- .../connectors/zendesk-chat/README.md | 14 +++++----- .../connectors/zendesk-support/README.md | 28 +++++++++---------- 21 files changed, 139 insertions(+), 139 deletions(-) diff --git a/docs/ai-agents/connectors/airtable/README.md b/docs/ai-agents/connectors/airtable/README.md index f6c36055c4be..76c090dad27b 100644 --- a/docs/ai-agents/connectors/airtable/README.md +++ b/docs/ai-agents/connectors/airtable/README.md @@ -90,8 +90,8 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| -| Bases | [List](./REFERENCE.md#bases-list) | -| Tables | [List](./REFERENCE.md#tables-list) | +| Bases | [List](./REFERENCE.md#bases-list), [Search](./REFERENCE.md#bases-search) | +| Tables | [List](./REFERENCE.md#tables-list), [Search](./REFERENCE.md#tables-search) | | Records | [List](./REFERENCE.md#records-list), [Get](./REFERENCE.md#records-get) | @@ -105,7 +105,7 @@ See the official [Airtable API reference](https://airtable.com/developers/web/ap ## Version information -- **Package version:** 0.1.23 +- **Package version:** 0.1.24 - **Connector version:** 1.0.4 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/airtable/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/amazon-ads/README.md b/docs/ai-agents/connectors/amazon-ads/README.md index d70d6bfaa2f8..60edd83f9ac0 100644 --- a/docs/ai-agents/connectors/amazon-ads/README.md +++ b/docs/ai-agents/connectors/amazon-ads/README.md @@ -89,7 +89,7 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Profiles | [List](./REFERENCE.md#profiles-list), [Get](./REFERENCE.md#profiles-get) | +| Profiles | [List](./REFERENCE.md#profiles-list), [Get](./REFERENCE.md#profiles-get), [Search](./REFERENCE.md#profiles-search) | | Portfolios | [List](./REFERENCE.md#portfolios-list), [Get](./REFERENCE.md#portfolios-get) | | Sponsored Product Campaigns | [List](./REFERENCE.md#sponsored-product-campaigns-list), [Get](./REFERENCE.md#sponsored-product-campaigns-get) | @@ -104,7 +104,7 @@ See the official [Amazon-Ads API reference](https://advertising.amazon.com/API/d ## Version information -- **Package version:** 0.1.44 +- **Package version:** 0.1.45 - **Connector version:** 1.0.8 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/amazon-ads/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/asana/README.md b/docs/ai-agents/connectors/asana/README.md index 6935fe91fcb6..20ee833809b8 100644 --- a/docs/ai-agents/connectors/asana/README.md +++ b/docs/ai-agents/connectors/asana/README.md @@ -92,25 +92,25 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| -| Tasks | [List](./REFERENCE.md#tasks-list), [Get](./REFERENCE.md#tasks-get) | +| Tasks | [List](./REFERENCE.md#tasks-list), [Get](./REFERENCE.md#tasks-get), [Search](./REFERENCE.md#tasks-search) | | Project Tasks | [List](./REFERENCE.md#project-tasks-list) | | Workspace Task Search | [List](./REFERENCE.md#workspace-task-search-list) | -| Projects | [List](./REFERENCE.md#projects-list), [Get](./REFERENCE.md#projects-get) | +| Projects | [List](./REFERENCE.md#projects-list), [Get](./REFERENCE.md#projects-get), [Search](./REFERENCE.md#projects-search) | | Task Projects | [List](./REFERENCE.md#task-projects-list) | | Team Projects | [List](./REFERENCE.md#team-projects-list) | | Workspace Projects | [List](./REFERENCE.md#workspace-projects-list) | -| Workspaces | [List](./REFERENCE.md#workspaces-list), [Get](./REFERENCE.md#workspaces-get) | -| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get) | +| Workspaces | [List](./REFERENCE.md#workspaces-list), [Get](./REFERENCE.md#workspaces-get), [Search](./REFERENCE.md#workspaces-search) | +| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get), [Search](./REFERENCE.md#users-search) | | Workspace Users | [List](./REFERENCE.md#workspace-users-list) | | Team Users | [List](./REFERENCE.md#team-users-list) | -| Teams | [Get](./REFERENCE.md#teams-get) | +| Teams | [Get](./REFERENCE.md#teams-get), [Search](./REFERENCE.md#teams-search) | | Workspace Teams | [List](./REFERENCE.md#workspace-teams-list) | | User Teams | [List](./REFERENCE.md#user-teams-list) | -| Attachments | [List](./REFERENCE.md#attachments-list), [Get](./REFERENCE.md#attachments-get), [Download](./REFERENCE.md#attachments-download) | +| Attachments | [List](./REFERENCE.md#attachments-list), [Get](./REFERENCE.md#attachments-get), [Download](./REFERENCE.md#attachments-download), [Search](./REFERENCE.md#attachments-search) | | Workspace Tags | [List](./REFERENCE.md#workspace-tags-list) | -| Tags | [Get](./REFERENCE.md#tags-get) | +| Tags | [Get](./REFERENCE.md#tags-get), [Search](./REFERENCE.md#tags-search) | | Project 
Sections | [List](./REFERENCE.md#project-sections-list) | -| Sections | [Get](./REFERENCE.md#sections-get) | +| Sections | [Get](./REFERENCE.md#sections-get), [Search](./REFERENCE.md#sections-search) | | Task Subtasks | [List](./REFERENCE.md#task-subtasks-list) | | Task Dependencies | [List](./REFERENCE.md#task-dependencies-list) | | Task Dependents | [List](./REFERENCE.md#task-dependents-list) | @@ -126,7 +126,7 @@ See the official [Asana API reference](https://developers.asana.com/reference/re ## Version information -- **Package version:** 0.19.96 +- **Package version:** 0.19.97 - **Connector version:** 0.1.13 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/asana/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/facebook-marketing/README.md b/docs/ai-agents/connectors/facebook-marketing/README.md index cd3bc06a180a..a30e43a03275 100644 --- a/docs/ai-agents/connectors/facebook-marketing/README.md +++ b/docs/ai-agents/connectors/facebook-marketing/README.md @@ -92,16 +92,16 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| | Current User | [Get](./REFERENCE.md#current-user-get) | -| Ad Accounts | [List](./REFERENCE.md#ad-accounts-list) | -| Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get) | -| Ad Sets | [List](./REFERENCE.md#ad-sets-list), [Get](./REFERENCE.md#ad-sets-get) | -| Ads | [List](./REFERENCE.md#ads-list), [Get](./REFERENCE.md#ads-get) | -| Ad Creatives | [List](./REFERENCE.md#ad-creatives-list) | -| Ads Insights | [List](./REFERENCE.md#ads-insights-list) | -| Ad Account | [Get](./REFERENCE.md#ad-account-get) | -| Custom Conversions | [List](./REFERENCE.md#custom-conversions-list) | -| Images | [List](./REFERENCE.md#images-list) | -| Videos | [List](./REFERENCE.md#videos-list) | +| Ad Accounts | [List](./REFERENCE.md#ad-accounts-list), [Search](./REFERENCE.md#ad-accounts-search) | +| Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get), [Search](./REFERENCE.md#campaigns-search) | +| Ad Sets | [List](./REFERENCE.md#ad-sets-list), [Get](./REFERENCE.md#ad-sets-get), [Search](./REFERENCE.md#ad-sets-search) | +| Ads | [List](./REFERENCE.md#ads-list), [Get](./REFERENCE.md#ads-get), [Search](./REFERENCE.md#ads-search) | +| Ad Creatives | [List](./REFERENCE.md#ad-creatives-list), [Search](./REFERENCE.md#ad-creatives-search) | +| Ads Insights | [List](./REFERENCE.md#ads-insights-list), [Search](./REFERENCE.md#ads-insights-search) | +| Ad Account | [Get](./REFERENCE.md#ad-account-get), [Search](./REFERENCE.md#ad-account-search) | +| Custom Conversions | [List](./REFERENCE.md#custom-conversions-list), [Search](./REFERENCE.md#custom-conversions-search) | +| Images | [List](./REFERENCE.md#images-list), [Search](./REFERENCE.md#images-search) | +| Videos | [List](./REFERENCE.md#videos-list), [Search](./REFERENCE.md#videos-search) | ### Authentication and configuration @@ -114,7 +114,7 @@ See the official [Facebook-Marketing API reference](https://developers.facebook. 
## Version information -- **Package version:** 0.1.29 +- **Package version:** 0.1.30 - **Connector version:** 1.0.15 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/facebook-marketing/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/github/README.md b/docs/ai-agents/connectors/github/README.md index ddc9a3f2500c..1adf8f3d9ad7 100644 --- a/docs/ai-agents/connectors/github/README.md +++ b/docs/ai-agents/connectors/github/README.md @@ -125,7 +125,7 @@ See the official [Github API reference](https://docs.github.com/en/rest). ## Version information -- **Package version:** 0.18.96 +- **Package version:** 0.18.97 - **Connector version:** 0.1.12 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/github/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/gong/README.md b/docs/ai-agents/connectors/gong/README.md index 7bb6ea27d756..e65ada619bf6 100644 --- a/docs/ai-agents/connectors/gong/README.md +++ b/docs/ai-agents/connectors/gong/README.md @@ -95,9 +95,9 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| -| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get) | -| Calls | [List](./REFERENCE.md#calls-list), [Get](./REFERENCE.md#calls-get) | -| Calls Extensive | [List](./REFERENCE.md#calls-extensive-list) | +| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get), [Search](./REFERENCE.md#users-search) | +| Calls | [List](./REFERENCE.md#calls-list), [Get](./REFERENCE.md#calls-get), [Search](./REFERENCE.md#calls-search) | +| Calls Extensive | [List](./REFERENCE.md#calls-extensive-list), [Search](./REFERENCE.md#calls-extensive-search) | | Call Audio | [Download](./REFERENCE.md#call-audio-download) | | Call Video | [Download](./REFERENCE.md#call-video-download) | | Workspaces | [List](./REFERENCE.md#workspaces-list) | @@ -105,12 +105,12 @@ This connector supports the following entities and actions. 
For more details, se | Stats Activity Aggregate | [List](./REFERENCE.md#stats-activity-aggregate-list) | | Stats Activity Day By Day | [List](./REFERENCE.md#stats-activity-day-by-day-list) | | Stats Interaction | [List](./REFERENCE.md#stats-interaction-list) | -| Settings Scorecards | [List](./REFERENCE.md#settings-scorecards-list) | +| Settings Scorecards | [List](./REFERENCE.md#settings-scorecards-list), [Search](./REFERENCE.md#settings-scorecards-search) | | Settings Trackers | [List](./REFERENCE.md#settings-trackers-list) | | Library Folders | [List](./REFERENCE.md#library-folders-list) | | Library Folder Content | [List](./REFERENCE.md#library-folder-content-list) | | Coaching | [List](./REFERENCE.md#coaching-list) | -| Stats Activity Scorecards | [List](./REFERENCE.md#stats-activity-scorecards-list) | +| Stats Activity Scorecards | [List](./REFERENCE.md#stats-activity-scorecards-list), [Search](./REFERENCE.md#stats-activity-scorecards-search) | ### Authentication and configuration @@ -123,7 +123,7 @@ See the official [Gong API reference](https://gong.app.gong.io/settings/api/docu ## Version information -- **Package version:** 0.19.101 +- **Package version:** 0.19.102 - **Connector version:** 0.1.18 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/gong/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/google-drive/README.md b/docs/ai-agents/connectors/google-drive/README.md index 4b380b6771ae..3bb305aee7e5 100644 --- a/docs/ai-agents/connectors/google-drive/README.md +++ b/docs/ai-agents/connectors/google-drive/README.md @@ -124,7 +124,7 @@ See the official [Google-Drive API reference](https://developers.google.com/work ## Version information -- **Package version:** 0.1.64 +- **Package version:** 0.1.65 - **Connector version:** 0.1.7 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/google-drive/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/greenhouse/README.md b/docs/ai-agents/connectors/greenhouse/README.md index 0f240de74de2..a2285f081660 100644 --- a/docs/ai-agents/connectors/greenhouse/README.md +++ b/docs/ai-agents/connectors/greenhouse/README.md @@ -94,15 +94,15 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Candidates | [List](./REFERENCE.md#candidates-list), [Get](./REFERENCE.md#candidates-get) | -| Applications | [List](./REFERENCE.md#applications-list), [Get](./REFERENCE.md#applications-get) | -| Jobs | [List](./REFERENCE.md#jobs-list), [Get](./REFERENCE.md#jobs-get) | -| Offers | [List](./REFERENCE.md#offers-list), [Get](./REFERENCE.md#offers-get) | -| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get) | -| Departments | [List](./REFERENCE.md#departments-list), [Get](./REFERENCE.md#departments-get) | -| Offices | [List](./REFERENCE.md#offices-list), [Get](./REFERENCE.md#offices-get) | -| Job Posts | [List](./REFERENCE.md#job-posts-list), [Get](./REFERENCE.md#job-posts-get) | -| Sources | [List](./REFERENCE.md#sources-list) | +| Candidates | [List](./REFERENCE.md#candidates-list), [Get](./REFERENCE.md#candidates-get), [Search](./REFERENCE.md#candidates-search) | +| Applications | [List](./REFERENCE.md#applications-list), [Get](./REFERENCE.md#applications-get), [Search](./REFERENCE.md#applications-search) | +| Jobs | [List](./REFERENCE.md#jobs-list), [Get](./REFERENCE.md#jobs-get), [Search](./REFERENCE.md#jobs-search) | +| Offers | [List](./REFERENCE.md#offers-list), [Get](./REFERENCE.md#offers-get), [Search](./REFERENCE.md#offers-search) | +| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get), [Search](./REFERENCE.md#users-search) | +| Departments | [List](./REFERENCE.md#departments-list), [Get](./REFERENCE.md#departments-get), [Search](./REFERENCE.md#departments-search) | +| Offices | [List](./REFERENCE.md#offices-list), [Get](./REFERENCE.md#offices-get), [Search](./REFERENCE.md#offices-search) | +| Job Posts | [List](./REFERENCE.md#job-posts-list), [Get](./REFERENCE.md#job-posts-get), [Search](./REFERENCE.md#job-posts-search) | +| Sources | [List](./REFERENCE.md#sources-list), [Search](./REFERENCE.md#sources-search) | | Scheduled Interviews | [List](./REFERENCE.md#scheduled-interviews-list), [Get](./REFERENCE.md#scheduled-interviews-get) | | Application Attachment | [Download](./REFERENCE.md#application-attachment-download) | | Candidate Attachment | [Download](./REFERENCE.md#candidate-attachment-download) | @@ -118,7 +118,7 @@ See the official [Greenhouse API reference](https://developers.greenhouse.io/har ## Version information -- **Package version:** 0.17.90 +- **Package version:** 0.17.91 - **Connector version:** 0.1.6 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/greenhouse/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/hubspot/README.md b/docs/ai-agents/connectors/hubspot/README.md index 5bc224523dfc..e4d4cc8b33da 100644 --- a/docs/ai-agents/connectors/hubspot/README.md +++ b/docs/ai-agents/connectors/hubspot/README.md @@ -95,9 +95,9 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Contacts | [List](./REFERENCE.md#contacts-list), [Get](./REFERENCE.md#contacts-get), [API Search](./REFERENCE.md#contacts-api_search) | -| Companies | [List](./REFERENCE.md#companies-list), [Get](./REFERENCE.md#companies-get), [API Search](./REFERENCE.md#companies-api_search) | -| Deals | [List](./REFERENCE.md#deals-list), [Get](./REFERENCE.md#deals-get), [API Search](./REFERENCE.md#deals-api_search) | +| Contacts | [List](./REFERENCE.md#contacts-list), [Get](./REFERENCE.md#contacts-get), [API Search](./REFERENCE.md#contacts-api_search), [Search](./REFERENCE.md#contacts-search) | +| Companies | [List](./REFERENCE.md#companies-list), [Get](./REFERENCE.md#companies-get), [API Search](./REFERENCE.md#companies-api_search), [Search](./REFERENCE.md#companies-search) | +| Deals | [List](./REFERENCE.md#deals-list), [Get](./REFERENCE.md#deals-get), [API Search](./REFERENCE.md#deals-api_search), [Search](./REFERENCE.md#deals-search) | | Tickets | [List](./REFERENCE.md#tickets-list), [Get](./REFERENCE.md#tickets-get), [API Search](./REFERENCE.md#tickets-api_search) | | Schemas | [List](./REFERENCE.md#schemas-list), [Get](./REFERENCE.md#schemas-get) | | Objects | [List](./REFERENCE.md#objects-list), [Get](./REFERENCE.md#objects-get) | @@ -113,7 +113,7 @@ See the official [Hubspot API reference](https://developers.hubspot.com/docs/api ## Version information -- **Package version:** 0.15.96 +- **Package version:** 0.15.97 - **Connector version:** 0.1.12 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/hubspot/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/intercom/README.md b/docs/ai-agents/connectors/intercom/README.md index f563ae27013c..8d94165c88ed 100644 --- a/docs/ai-agents/connectors/intercom/README.md +++ b/docs/ai-agents/connectors/intercom/README.md @@ -95,10 +95,10 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Contacts | [List](./REFERENCE.md#contacts-list), [Get](./REFERENCE.md#contacts-get) | -| Conversations | [List](./REFERENCE.md#conversations-list), [Get](./REFERENCE.md#conversations-get) | -| Companies | [List](./REFERENCE.md#companies-list), [Get](./REFERENCE.md#companies-get) | -| Teams | [List](./REFERENCE.md#teams-list), [Get](./REFERENCE.md#teams-get) | +| Contacts | [List](./REFERENCE.md#contacts-list), [Get](./REFERENCE.md#contacts-get), [Search](./REFERENCE.md#contacts-search) | +| Conversations | [List](./REFERENCE.md#conversations-list), [Get](./REFERENCE.md#conversations-get), [Search](./REFERENCE.md#conversations-search) | +| Companies | [List](./REFERENCE.md#companies-list), [Get](./REFERENCE.md#companies-get), [Search](./REFERENCE.md#companies-search) | +| Teams | [List](./REFERENCE.md#teams-list), [Get](./REFERENCE.md#teams-get), [Search](./REFERENCE.md#teams-search) | | Admins | [List](./REFERENCE.md#admins-list), [Get](./REFERENCE.md#admins-get) | | Tags | [List](./REFERENCE.md#tags-list), [Get](./REFERENCE.md#tags-get) | | Segments | [List](./REFERENCE.md#segments-list), [Get](./REFERENCE.md#segments-get) | @@ -114,7 +114,7 @@ See the official [Intercom API reference](https://developers.intercom.com/docs/r ## Version information -- **Package version:** 0.1.65 +- **Package version:** 0.1.66 - **Connector version:** 0.1.8 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/intercom/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/jira/README.md b/docs/ai-agents/connectors/jira/README.md index a3919bf7b809..800c89de89f1 100644 --- a/docs/ai-agents/connectors/jira/README.md +++ b/docs/ai-agents/connectors/jira/README.md @@ -98,12 +98,12 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Issues | [API Search](./REFERENCE.md#issues-api_search), [Create](./REFERENCE.md#issues-create), [Get](./REFERENCE.md#issues-get), [Update](./REFERENCE.md#issues-update), [Delete](./REFERENCE.md#issues-delete) | -| Projects | [API Search](./REFERENCE.md#projects-api_search), [Get](./REFERENCE.md#projects-get) | -| Users | [Get](./REFERENCE.md#users-get), [List](./REFERENCE.md#users-list), [API Search](./REFERENCE.md#users-api_search) | -| Issue Fields | [List](./REFERENCE.md#issue-fields-list), [API Search](./REFERENCE.md#issue-fields-api_search) | -| Issue Comments | [List](./REFERENCE.md#issue-comments-list), [Create](./REFERENCE.md#issue-comments-create), [Get](./REFERENCE.md#issue-comments-get), [Update](./REFERENCE.md#issue-comments-update), [Delete](./REFERENCE.md#issue-comments-delete) | -| Issue Worklogs | [List](./REFERENCE.md#issue-worklogs-list), [Get](./REFERENCE.md#issue-worklogs-get) | +| Issues | [API Search](./REFERENCE.md#issues-api_search), [Create](./REFERENCE.md#issues-create), [Get](./REFERENCE.md#issues-get), [Update](./REFERENCE.md#issues-update), [Delete](./REFERENCE.md#issues-delete), [Search](./REFERENCE.md#issues-search) | +| Projects | [API Search](./REFERENCE.md#projects-api_search), [Get](./REFERENCE.md#projects-get), [Search](./REFERENCE.md#projects-search) | +| Users | [Get](./REFERENCE.md#users-get), [List](./REFERENCE.md#users-list), [API Search](./REFERENCE.md#users-api_search), [Search](./REFERENCE.md#users-search) | +| Issue Fields | [List](./REFERENCE.md#issue-fields-list), [API Search](./REFERENCE.md#issue-fields-api_search), [Search](./REFERENCE.md#issue-fields-search) | +| Issue Comments | [List](./REFERENCE.md#issue-comments-list), [Create](./REFERENCE.md#issue-comments-create), [Get](./REFERENCE.md#issue-comments-get), [Update](./REFERENCE.md#issue-comments-update), [Delete](./REFERENCE.md#issue-comments-delete), [Search](./REFERENCE.md#issue-comments-search) | +| Issue Worklogs | [List](./REFERENCE.md#issue-worklogs-list), [Get](./REFERENCE.md#issue-worklogs-get), [Search](./REFERENCE.md#issue-worklogs-search) | | Issues Assignee | [Update](./REFERENCE.md#issues-assignee-update) | @@ -117,7 +117,7 @@ See the official [Jira API reference](https://developer.atlassian.com/cloud/jira ## Version information -- **Package version:** 0.1.86 +- **Package version:** 0.1.87 - **Connector version:** 1.1.6 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/jira/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/klaviyo/README.md b/docs/ai-agents/connectors/klaviyo/README.md index 0f045eaff619..4bde74d13fda 100644 --- a/docs/ai-agents/connectors/klaviyo/README.md +++ b/docs/ai-agents/connectors/klaviyo/README.md @@ -96,13 +96,13 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Profiles | [List](./REFERENCE.md#profiles-list), [Get](./REFERENCE.md#profiles-get) | -| Lists | [List](./REFERENCE.md#lists-list), [Get](./REFERENCE.md#lists-get) | -| Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get) | -| Events | [List](./REFERENCE.md#events-list) | -| Metrics | [List](./REFERENCE.md#metrics-list), [Get](./REFERENCE.md#metrics-get) | -| Flows | [List](./REFERENCE.md#flows-list), [Get](./REFERENCE.md#flows-get) | -| Email Templates | [List](./REFERENCE.md#email-templates-list), [Get](./REFERENCE.md#email-templates-get) | +| Profiles | [List](./REFERENCE.md#profiles-list), [Get](./REFERENCE.md#profiles-get), [Search](./REFERENCE.md#profiles-search) | +| Lists | [List](./REFERENCE.md#lists-list), [Get](./REFERENCE.md#lists-get), [Search](./REFERENCE.md#lists-search) | +| Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get), [Search](./REFERENCE.md#campaigns-search) | +| Events | [List](./REFERENCE.md#events-list), [Search](./REFERENCE.md#events-search) | +| Metrics | [List](./REFERENCE.md#metrics-list), [Get](./REFERENCE.md#metrics-get), [Search](./REFERENCE.md#metrics-search) | +| Flows | [List](./REFERENCE.md#flows-list), [Get](./REFERENCE.md#flows-get), [Search](./REFERENCE.md#flows-search) | +| Email Templates | [List](./REFERENCE.md#email-templates-list), [Get](./REFERENCE.md#email-templates-get), [Search](./REFERENCE.md#email-templates-search) | ### Authentication and configuration @@ -115,7 +115,7 @@ See the official [Klaviyo API reference](https://developers.klaviyo.com/en/refer ## Version information -- **Package version:** 0.1.22 +- **Package version:** 0.1.23 - **Connector version:** 1.0.2 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/klaviyo/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/linear/README.md b/docs/ai-agents/connectors/linear/README.md index b93deb50b351..45a1d2824c29 100644 --- a/docs/ai-agents/connectors/linear/README.md +++ b/docs/ai-agents/connectors/linear/README.md @@ -100,11 +100,11 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Issues | [List](./REFERENCE.md#issues-list), [Get](./REFERENCE.md#issues-get), [Create](./REFERENCE.md#issues-create), [Update](./REFERENCE.md#issues-update) | -| Projects | [List](./REFERENCE.md#projects-list), [Get](./REFERENCE.md#projects-get) | -| Teams | [List](./REFERENCE.md#teams-list), [Get](./REFERENCE.md#teams-get) | -| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get) | -| Comments | [List](./REFERENCE.md#comments-list), [Get](./REFERENCE.md#comments-get), [Create](./REFERENCE.md#comments-create), [Update](./REFERENCE.md#comments-update) | +| Issues | [List](./REFERENCE.md#issues-list), [Get](./REFERENCE.md#issues-get), [Create](./REFERENCE.md#issues-create), [Update](./REFERENCE.md#issues-update), [Search](./REFERENCE.md#issues-search) | +| Projects | [List](./REFERENCE.md#projects-list), [Get](./REFERENCE.md#projects-get), [Search](./REFERENCE.md#projects-search) | +| Teams | [List](./REFERENCE.md#teams-list), [Get](./REFERENCE.md#teams-get), [Search](./REFERENCE.md#teams-search) | +| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get), [Search](./REFERENCE.md#users-search) | +| Comments | [List](./REFERENCE.md#comments-list), [Get](./REFERENCE.md#comments-get), [Create](./REFERENCE.md#comments-create), [Update](./REFERENCE.md#comments-update), [Search](./REFERENCE.md#comments-search) | ### Authentication and configuration @@ -117,7 +117,7 @@ See the official [Linear API reference](https://linear.app/developers/graphql). ## Version information -- **Package version:** 0.19.92 +- **Package version:** 0.19.93 - **Connector version:** 0.1.10 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/linear/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/mailchimp/README.md b/docs/ai-agents/connectors/mailchimp/README.md index 564c8cabcb95..b9c85bf73e81 100644 --- a/docs/ai-agents/connectors/mailchimp/README.md +++ b/docs/ai-agents/connectors/mailchimp/README.md @@ -95,11 +95,11 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get) | -| Lists | [List](./REFERENCE.md#lists-list), [Get](./REFERENCE.md#lists-get) | +| Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get), [Search](./REFERENCE.md#campaigns-search) | +| Lists | [List](./REFERENCE.md#lists-list), [Get](./REFERENCE.md#lists-get), [Search](./REFERENCE.md#lists-search) | | List Members | [List](./REFERENCE.md#list-members-list), [Get](./REFERENCE.md#list-members-get) | -| Reports | [List](./REFERENCE.md#reports-list), [Get](./REFERENCE.md#reports-get) | -| Email Activity | [List](./REFERENCE.md#email-activity-list) | +| Reports | [List](./REFERENCE.md#reports-list), [Get](./REFERENCE.md#reports-get), [Search](./REFERENCE.md#reports-search) | +| Email Activity | [List](./REFERENCE.md#email-activity-list), [Search](./REFERENCE.md#email-activity-search) | | Automations | [List](./REFERENCE.md#automations-list) | | Tags | [List](./REFERENCE.md#tags-list) | | Interest Categories | [List](./REFERENCE.md#interest-categories-list), [Get](./REFERENCE.md#interest-categories-get) | @@ -119,7 +119,7 @@ See the official [Mailchimp API reference](https://mailchimp.com/developer/marke ## Version information -- **Package version:** 0.1.51 +- **Package version:** 0.1.52 - **Connector version:** 1.0.7 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/mailchimp/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/orb/README.md b/docs/ai-agents/connectors/orb/README.md index 3c37959deaec..015f90d41b05 100644 --- a/docs/ai-agents/connectors/orb/README.md +++ b/docs/ai-agents/connectors/orb/README.md @@ -100,10 +100,10 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| -| Customers | [List](./REFERENCE.md#customers-list), [Get](./REFERENCE.md#customers-get) | -| Subscriptions | [List](./REFERENCE.md#subscriptions-list), [Get](./REFERENCE.md#subscriptions-get) | -| Plans | [List](./REFERENCE.md#plans-list), [Get](./REFERENCE.md#plans-get) | -| Invoices | [List](./REFERENCE.md#invoices-list), [Get](./REFERENCE.md#invoices-get) | +| Customers | [List](./REFERENCE.md#customers-list), [Get](./REFERENCE.md#customers-get), [Search](./REFERENCE.md#customers-search) | +| Subscriptions | [List](./REFERENCE.md#subscriptions-list), [Get](./REFERENCE.md#subscriptions-get), [Search](./REFERENCE.md#subscriptions-search) | +| Plans | [List](./REFERENCE.md#plans-list), [Get](./REFERENCE.md#plans-get), [Search](./REFERENCE.md#plans-search) | +| Invoices | [List](./REFERENCE.md#invoices-list), [Get](./REFERENCE.md#invoices-get), [Search](./REFERENCE.md#invoices-search) | ### Authentication and configuration @@ -116,7 +116,7 @@ See the official [Orb API reference](https://docs.withorb.com/api-reference). 
## Version information -- **Package version:** 0.1.25 +- **Package version:** 0.1.26 - **Connector version:** 0.1.4 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/orb/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/salesforce/README.md b/docs/ai-agents/connectors/salesforce/README.md index 58a9a022ba8e..6c5522788693 100644 --- a/docs/ai-agents/connectors/salesforce/README.md +++ b/docs/ai-agents/connectors/salesforce/README.md @@ -96,11 +96,11 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| | Sobjects | [List](./REFERENCE.md#sobjects-list) | -| Accounts | [List](./REFERENCE.md#accounts-list), [Get](./REFERENCE.md#accounts-get), [API Search](./REFERENCE.md#accounts-api_search) | -| Contacts | [List](./REFERENCE.md#contacts-list), [Get](./REFERENCE.md#contacts-get), [API Search](./REFERENCE.md#contacts-api_search) | -| Leads | [List](./REFERENCE.md#leads-list), [Get](./REFERENCE.md#leads-get), [API Search](./REFERENCE.md#leads-api_search) | -| Opportunities | [List](./REFERENCE.md#opportunities-list), [Get](./REFERENCE.md#opportunities-get), [API Search](./REFERENCE.md#opportunities-api_search) | -| Tasks | [List](./REFERENCE.md#tasks-list), [Get](./REFERENCE.md#tasks-get), [API Search](./REFERENCE.md#tasks-api_search) | +| Accounts | [List](./REFERENCE.md#accounts-list), [Get](./REFERENCE.md#accounts-get), [API Search](./REFERENCE.md#accounts-api_search), [Search](./REFERENCE.md#accounts-search) | +| Contacts | [List](./REFERENCE.md#contacts-list), [Get](./REFERENCE.md#contacts-get), [API Search](./REFERENCE.md#contacts-api_search), [Search](./REFERENCE.md#contacts-search) | +| Leads | [List](./REFERENCE.md#leads-list), [Get](./REFERENCE.md#leads-get), [API Search](./REFERENCE.md#leads-api_search), [Search](./REFERENCE.md#leads-search) | +| Opportunities | [List](./REFERENCE.md#opportunities-list), [Get](./REFERENCE.md#opportunities-get), [API Search](./REFERENCE.md#opportunities-api_search), [Search](./REFERENCE.md#opportunities-search) | +| Tasks | [List](./REFERENCE.md#tasks-list), [Get](./REFERENCE.md#tasks-get), [API Search](./REFERENCE.md#tasks-api_search), [Search](./REFERENCE.md#tasks-search) | | Events | [List](./REFERENCE.md#events-list), [Get](./REFERENCE.md#events-get), [API Search](./REFERENCE.md#events-api_search) | | Campaigns | [List](./REFERENCE.md#campaigns-list), [Get](./REFERENCE.md#campaigns-get), [API Search](./REFERENCE.md#campaigns-api_search) | | Cases | [List](./REFERENCE.md#cases-list), [Get](./REFERENCE.md#cases-get), [API Search](./REFERENCE.md#cases-api_search) | @@ -120,7 +120,7 @@ See the official [Salesforce API reference](https://developer.salesforce.com/doc ## Version information -- **Package version:** 0.1.88 +- **Package version:** 0.1.89 - **Connector version:** 1.0.13 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/salesforce/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/shopify/README.md b/docs/ai-agents/connectors/shopify/README.md index 
645a9d7580d3..7a626f28d4bb 100644 --- a/docs/ai-agents/connectors/shopify/README.md +++ b/docs/ai-agents/connectors/shopify/README.md @@ -139,7 +139,7 @@ See the official [Shopify API reference](https://shopify.dev/docs/api/admin-rest ## Version information -- **Package version:** 0.1.45 +- **Package version:** 0.1.46 - **Connector version:** 0.1.7 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/shopify/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/slack/README.md b/docs/ai-agents/connectors/slack/README.md index 03f0a0234adf..70c7304589fe 100644 --- a/docs/ai-agents/connectors/slack/README.md +++ b/docs/ai-agents/connectors/slack/README.md @@ -108,8 +108,8 @@ This connector supports the following entities and actions. For more details, se | Entity | Actions | |--------|---------| -| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get) | -| Channels | [List](./REFERENCE.md#channels-list), [Get](./REFERENCE.md#channels-get), [Create](./REFERENCE.md#channels-create), [Update](./REFERENCE.md#channels-update) | +| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get), [Search](./REFERENCE.md#users-search) | +| Channels | [List](./REFERENCE.md#channels-list), [Get](./REFERENCE.md#channels-get), [Create](./REFERENCE.md#channels-create), [Update](./REFERENCE.md#channels-update), [Search](./REFERENCE.md#channels-search) | | Channel Messages | [List](./REFERENCE.md#channel-messages-list) | | Threads | [List](./REFERENCE.md#threads-list) | | Messages | [Create](./REFERENCE.md#messages-create), [Update](./REFERENCE.md#messages-update) | @@ -128,7 +128,7 @@ See the official [Slack API reference](https://api.slack.com/methods). ## Version information -- **Package version:** 0.1.59 +- **Package version:** 0.1.60 - **Connector version:** 0.1.15 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/slack/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/stripe/README.md b/docs/ai-agents/connectors/stripe/README.md index 676999e3e0a0..4617e7be62f6 100644 --- a/docs/ai-agents/connectors/stripe/README.md +++ b/docs/ai-agents/connectors/stripe/README.md @@ -97,11 +97,11 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Customers | [List](./REFERENCE.md#customers-list), [Create](./REFERENCE.md#customers-create), [Get](./REFERENCE.md#customers-get), [Update](./REFERENCE.md#customers-update), [Delete](./REFERENCE.md#customers-delete), [API Search](./REFERENCE.md#customers-api_search) | -| Invoices | [List](./REFERENCE.md#invoices-list), [Get](./REFERENCE.md#invoices-get), [API Search](./REFERENCE.md#invoices-api_search) | -| Charges | [List](./REFERENCE.md#charges-list), [Get](./REFERENCE.md#charges-get), [API Search](./REFERENCE.md#charges-api_search) | -| Subscriptions | [List](./REFERENCE.md#subscriptions-list), [Get](./REFERENCE.md#subscriptions-get), [API Search](./REFERENCE.md#subscriptions-api_search) | -| Refunds | [List](./REFERENCE.md#refunds-list), [Create](./REFERENCE.md#refunds-create), [Get](./REFERENCE.md#refunds-get) | +| Customers | [List](./REFERENCE.md#customers-list), [Create](./REFERENCE.md#customers-create), [Get](./REFERENCE.md#customers-get), [Update](./REFERENCE.md#customers-update), [Delete](./REFERENCE.md#customers-delete), [API Search](./REFERENCE.md#customers-api_search), [Search](./REFERENCE.md#customers-search) | +| Invoices | [List](./REFERENCE.md#invoices-list), [Get](./REFERENCE.md#invoices-get), [API Search](./REFERENCE.md#invoices-api_search), [Search](./REFERENCE.md#invoices-search) | +| Charges | [List](./REFERENCE.md#charges-list), [Get](./REFERENCE.md#charges-get), [API Search](./REFERENCE.md#charges-api_search), [Search](./REFERENCE.md#charges-search) | +| Subscriptions | [List](./REFERENCE.md#subscriptions-list), [Get](./REFERENCE.md#subscriptions-get), [API Search](./REFERENCE.md#subscriptions-api_search), [Search](./REFERENCE.md#subscriptions-search) | +| Refunds | [List](./REFERENCE.md#refunds-list), [Create](./REFERENCE.md#refunds-create), [Get](./REFERENCE.md#refunds-get), [Search](./REFERENCE.md#refunds-search) | | Products | [List](./REFERENCE.md#products-list), [Create](./REFERENCE.md#products-create), [Get](./REFERENCE.md#products-get), [Update](./REFERENCE.md#products-update), [Delete](./REFERENCE.md#products-delete), [API Search](./REFERENCE.md#products-api_search) | | Balance | [Get](./REFERENCE.md#balance-get) | | Balance Transactions | [List](./REFERENCE.md#balance-transactions-list), [Get](./REFERENCE.md#balance-transactions-get) | @@ -120,7 +120,7 @@ See the official [Stripe API reference](https://docs.stripe.com/api). ## Version information -- **Package version:** 0.5.93 +- **Package version:** 0.5.94 - **Connector version:** 0.1.9 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/stripe/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/zendesk-chat/README.md b/docs/ai-agents/connectors/zendesk-chat/README.md index 03b926b00f24..98a2fbaca66f 100644 --- a/docs/ai-agents/connectors/zendesk-chat/README.md +++ b/docs/ai-agents/connectors/zendesk-chat/README.md @@ -108,17 +108,17 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| | Accounts | [Get](./REFERENCE.md#accounts-get) | -| Agents | [List](./REFERENCE.md#agents-list), [Get](./REFERENCE.md#agents-get) | +| Agents | [List](./REFERENCE.md#agents-list), [Get](./REFERENCE.md#agents-get), [Search](./REFERENCE.md#agents-search) | | Agent Timeline | [List](./REFERENCE.md#agent-timeline-list) | | Bans | [List](./REFERENCE.md#bans-list), [Get](./REFERENCE.md#bans-get) | -| Chats | [List](./REFERENCE.md#chats-list), [Get](./REFERENCE.md#chats-get) | -| Departments | [List](./REFERENCE.md#departments-list), [Get](./REFERENCE.md#departments-get) | +| Chats | [List](./REFERENCE.md#chats-list), [Get](./REFERENCE.md#chats-get), [Search](./REFERENCE.md#chats-search) | +| Departments | [List](./REFERENCE.md#departments-list), [Get](./REFERENCE.md#departments-get), [Search](./REFERENCE.md#departments-search) | | Goals | [List](./REFERENCE.md#goals-list), [Get](./REFERENCE.md#goals-get) | | Roles | [List](./REFERENCE.md#roles-list), [Get](./REFERENCE.md#roles-get) | | Routing Settings | [Get](./REFERENCE.md#routing-settings-get) | -| Shortcuts | [List](./REFERENCE.md#shortcuts-list), [Get](./REFERENCE.md#shortcuts-get) | +| Shortcuts | [List](./REFERENCE.md#shortcuts-list), [Get](./REFERENCE.md#shortcuts-get), [Search](./REFERENCE.md#shortcuts-search) | | Skills | [List](./REFERENCE.md#skills-list), [Get](./REFERENCE.md#skills-get) | -| Triggers | [List](./REFERENCE.md#triggers-list) | +| Triggers | [List](./REFERENCE.md#triggers-list), [Search](./REFERENCE.md#triggers-search) | ### Authentication and configuration @@ -131,7 +131,7 @@ See the official [Zendesk-Chat API reference](https://developer.zendesk.com/api- ## Version information -- **Package version:** 0.1.45 +- **Package version:** 0.1.46 - **Connector version:** 0.1.8 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/zendesk-chat/CHANGELOG.md) \ No newline at end of file diff --git a/docs/ai-agents/connectors/zendesk-support/README.md b/docs/ai-agents/connectors/zendesk-support/README.md index f3d47880e7ec..8fe83de1e6f9 100644 --- a/docs/ai-agents/connectors/zendesk-support/README.md +++ b/docs/ai-agents/connectors/zendesk-support/README.md @@ -92,26 +92,26 @@ This connector supports the following entities and actions. 
For more details, se | Entity | Actions | |--------|---------| -| Tickets | [List](./REFERENCE.md#tickets-list), [Get](./REFERENCE.md#tickets-get) | -| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get) | -| Organizations | [List](./REFERENCE.md#organizations-list), [Get](./REFERENCE.md#organizations-get) | -| Groups | [List](./REFERENCE.md#groups-list), [Get](./REFERENCE.md#groups-get) | -| Ticket Comments | [List](./REFERENCE.md#ticket-comments-list) | +| Tickets | [List](./REFERENCE.md#tickets-list), [Get](./REFERENCE.md#tickets-get), [Search](./REFERENCE.md#tickets-search) | +| Users | [List](./REFERENCE.md#users-list), [Get](./REFERENCE.md#users-get), [Search](./REFERENCE.md#users-search) | +| Organizations | [List](./REFERENCE.md#organizations-list), [Get](./REFERENCE.md#organizations-get), [Search](./REFERENCE.md#organizations-search) | +| Groups | [List](./REFERENCE.md#groups-list), [Get](./REFERENCE.md#groups-get), [Search](./REFERENCE.md#groups-search) | +| Ticket Comments | [List](./REFERENCE.md#ticket-comments-list), [Search](./REFERENCE.md#ticket-comments-search) | | Attachments | [Get](./REFERENCE.md#attachments-get), [Download](./REFERENCE.md#attachments-download) | -| Ticket Audits | [List](./REFERENCE.md#ticket-audits-list), [List](./REFERENCE.md#ticket-audits-list) | -| Ticket Metrics | [List](./REFERENCE.md#ticket-metrics-list) | -| Ticket Fields | [List](./REFERENCE.md#ticket-fields-list), [Get](./REFERENCE.md#ticket-fields-get) | -| Brands | [List](./REFERENCE.md#brands-list), [Get](./REFERENCE.md#brands-get) | +| Ticket Audits | [List](./REFERENCE.md#ticket-audits-list), [List](./REFERENCE.md#ticket-audits-list), [Search](./REFERENCE.md#ticket-audits-search) | +| Ticket Metrics | [List](./REFERENCE.md#ticket-metrics-list), [Search](./REFERENCE.md#ticket-metrics-search) | +| Ticket Fields | [List](./REFERENCE.md#ticket-fields-list), [Get](./REFERENCE.md#ticket-fields-get), [Search](./REFERENCE.md#ticket-fields-search) | +| Brands | [List](./REFERENCE.md#brands-list), [Get](./REFERENCE.md#brands-get), [Search](./REFERENCE.md#brands-search) | | Views | [List](./REFERENCE.md#views-list), [Get](./REFERENCE.md#views-get) | | Macros | [List](./REFERENCE.md#macros-list), [Get](./REFERENCE.md#macros-get) | | Triggers | [List](./REFERENCE.md#triggers-list), [Get](./REFERENCE.md#triggers-get) | | Automations | [List](./REFERENCE.md#automations-list), [Get](./REFERENCE.md#automations-get) | -| Tags | [List](./REFERENCE.md#tags-list) | -| Satisfaction Ratings | [List](./REFERENCE.md#satisfaction-ratings-list), [Get](./REFERENCE.md#satisfaction-ratings-get) | +| Tags | [List](./REFERENCE.md#tags-list), [Search](./REFERENCE.md#tags-search) | +| Satisfaction Ratings | [List](./REFERENCE.md#satisfaction-ratings-list), [Get](./REFERENCE.md#satisfaction-ratings-get), [Search](./REFERENCE.md#satisfaction-ratings-search) | | Group Memberships | [List](./REFERENCE.md#group-memberships-list) | | Organization Memberships | [List](./REFERENCE.md#organization-memberships-list) | | Sla Policies | [List](./REFERENCE.md#sla-policies-list), [Get](./REFERENCE.md#sla-policies-get) | -| Ticket Forms | [List](./REFERENCE.md#ticket-forms-list), [Get](./REFERENCE.md#ticket-forms-get) | +| Ticket Forms | [List](./REFERENCE.md#ticket-forms-list), [Get](./REFERENCE.md#ticket-forms-get), [Search](./REFERENCE.md#ticket-forms-search) | | Articles | [List](./REFERENCE.md#articles-list), [Get](./REFERENCE.md#articles-get) | | Article Attachments | 
[List](./REFERENCE.md#article-attachments-list), [Get](./REFERENCE.md#article-attachments-get), [Download](./REFERENCE.md#article-attachments-download) | @@ -126,7 +126,7 @@ See the official [Zendesk-Support API reference](https://developer.zendesk.com/a ## Version information -- **Package version:** 0.18.96 +- **Package version:** 0.18.97 - **Connector version:** 0.1.13 -- **Generated with Connector SDK commit SHA:** b36beaead6fb6c49f155ba346a49e61388c16278 +- **Generated with Connector SDK commit SHA:** df1e8094b5b2d94e172536ce7f33fb98f2c3fdc1 - **Changelog:** [View changelog](https://github.com/airbytehq/airbyte-agent-connectors/blob/main/connectors/zendesk-support/CHANGELOG.md) \ No newline at end of file From 559e4916d2582a61fc85d5233fb65b160d5effe3 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Mon, 26 Jan 2026 05:12:35 +0530 Subject: [PATCH 06/16] feat: airbyte bigquery destination custom partitioning/clustering logic --- .../bigquery/BigqueryBeansFactory.kt | 7 ++ .../bigquery/SafeDestinationCatalogFactory.kt | 97 +++++++++++++++++++ .../bigquery/spec/BigqueryConfiguration.kt | 22 +++++ .../bigquery/spec/BigquerySpecification.kt | 52 ++++++++++ .../bigquery/stream/StreamConfigProvider.kt | 77 +++++++++++++++ ...BigqueryDirectLoadNativeTableOperations.kt | 73 +++++++++++++- .../BigqueryDirectLoadSqlGenerator.kt | 53 +++++++++- 7 files changed, 375 insertions(+), 6 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt create mode 100644 airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt index fb39dfee76e3..7ab37d94ca01 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt @@ -38,6 +38,7 @@ import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadSqlTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.legacy_raw_tables.BigqueryRawTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.legacy_raw_tables.BigqueryTypingDedupingDatabaseInitialStatusGatherer +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider // ADDED import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Requires @@ -54,6 +55,9 @@ private val logger = KotlinLogging.logger {} class BigqueryBeansFactory { @Singleton fun getConfig(config: DestinationConfiguration) = config as BigqueryConfiguration + @Singleton + fun getStreamConfigProvider(config: BigqueryConfiguration) = StreamConfigProvider(config) + @Singleton @Requires(condition = BigqueryConfiguredForBulkLoad::class) fun getBulkLoadConfig(config: BigqueryConfiguration) = BigqueryBulkLoadConfiguration(config) @@ -102,6 
+106,7 @@ class BigqueryBeansFactory { // we use a different type depending on whether we're in legacy raw tables vs // direct-load tables mode. streamStateStore: StreamStateStore<*>, + streamConfigProvider: StreamConfigProvider, ): DestinationWriter { val destinationHandler = BigQueryDatabaseHandler(bigquery, config.datasetLocation.region) if (config.legacyRawTablesOnly) { @@ -127,6 +132,7 @@ class BigqueryBeansFactory { BigqueryDirectLoadSqlGenerator( projectId = config.projectId, cdcDeletionMode = config.cdcDeletionMode, + streamConfigProvider = streamConfigProvider, ), destinationHandler, ), @@ -154,6 +160,7 @@ class BigqueryBeansFactory { destinationHandler, projectId = config.projectId, tempTableNameGenerator, + streamConfigProvider, ), sqlTableOperations = sqlTableOperations, streamStateStore = streamStateStore, diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt new file mode 100644 index 000000000000..81c006b56955 --- /dev/null +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2026 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.bigquery + +import io.airbyte.cdk.Operation +import io.airbyte.cdk.load.command.Append +import io.airbyte.cdk.load.command.Dedupe +import io.airbyte.cdk.load.command.DefaultDestinationCatalogFactory +import io.airbyte.cdk.load.command.DestinationCatalog +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.command.NamespaceMapper +import io.airbyte.cdk.load.command.Overwrite +import io.airbyte.cdk.load.command.SoftDelete +import io.airbyte.cdk.load.command.Update +import io.airbyte.cdk.load.config.CHECK_STREAM_NAMESPACE +import io.airbyte.cdk.load.data.FieldType +import io.airbyte.cdk.load.data.IntegerType +import io.airbyte.cdk.load.data.ObjectType +import io.airbyte.cdk.load.data.json.JsonSchemaToAirbyteType +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Replaces +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton +import java.time.LocalDate +import java.time.format.DateTimeFormatter +import org.apache.commons.lang3.RandomStringUtils + +@Factory +@Replaces(DefaultDestinationCatalogFactory::class) +class SafeDestinationCatalogFactory { + @Singleton + fun getDestinationCatalog( + catalog: ConfiguredAirbyteCatalog, + namespaceMapper: NamespaceMapper, + jsonSchemaToAirbyteType: JsonSchemaToAirbyteType, + @Value("\${${Operation.PROPERTY}}") operation: String, + @Named("checkNamespace") checkNamespace: String?, + ): DestinationCatalog { + if (operation == "check") { + // Copied from DefaultDestinationCatalogFactory to maintain behavior + val date = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyyMMdd")) + val random = RandomStringUtils.randomAlphabetic(5).lowercase() + val namespace = checkNamespace ?: "${CHECK_STREAM_NAMESPACE}_$date$random" + return DestinationCatalog( + listOf( + DestinationStream( + unmappedNamespace = namespace, + unmappedName = "test$date$random", + 
importType = Append,
+                        schema =
+                            ObjectType(
+                                linkedMapOf("test" to FieldType(IntegerType, nullable = true))
+                            ),
+                        generationId = 1,
+                        minimumGenerationId = 0,
+                        syncId = 1,
+                        namespaceMapper = namespaceMapper
+                    )
+                )
+            )
+        } else {
+            val streams =
+                catalog.streams.map { stream ->
+                    val importType =
+                        when (stream.destinationSyncMode) {
+                            null -> throw IllegalArgumentException("Destination sync mode was null")
+                            DestinationSyncMode.OVERWRITE -> Overwrite
+                            DestinationSyncMode.APPEND -> Append
+                            DestinationSyncMode.APPEND_DEDUP ->
+                                Dedupe(
+                                    primaryKey = stream.primaryKey ?: emptyList(),
+                                    cursor = stream.cursorField ?: emptyList()
+                                )
+                            DestinationSyncMode.UPDATE -> Update
+                            DestinationSyncMode.SOFT_DELETE -> SoftDelete
+                        }
+
+                    DestinationStream(
+                        unmappedName = stream.stream.name,
+                        unmappedNamespace = stream.stream.namespace,
+                        importType = importType,
+                        schema = jsonSchemaToAirbyteType.convert(stream.stream.jsonSchema),
+                        generationId = stream.generationId ?: 0,
+                        minimumGenerationId = stream.minimumGenerationId ?: 0,
+                        syncId = stream.syncId ?: 0,
+                        namespaceMapper = namespaceMapper,
+                    )
+                }
+            return DestinationCatalog(streams)
+        }
+    }
+}
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt
index cddb15ef8b2e..ecc531163d2c 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt
+++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt
@@ -19,6 +19,10 @@ data class BigqueryConfiguration(
     val cdcDeletionMode: CdcDeletionMode,
     val internalTableDataset: String,
     val legacyRawTablesOnly: Boolean,
+    val defaultPartitioningField: String?,
+    val defaultClusteringField: String?,
+    val defaultTableSuffix: String?,
+    val streamConfigMap: Map<String, StreamLevelConfig>,
 ) : DestinationConfiguration() {
     override val numOpenStreamWorkers = 3
     // currently the base cdk declares 0.2 as the default.
@@ -26,6 +30,13 @@ data class BigqueryConfiguration(
     override val maxMessageQueueMemoryUsageRatio = 0.4
 }
 
+data class StreamLevelConfig(
+    val partitioningField: String? = null,
+    val clusteringField: String? = null,
+    val tableSuffix: String? = null,
+    val dataset: String? = null,
+)
+
 sealed interface LoadingMethodConfiguration
 
 data object BatchedStandardInsertConfiguration : LoadingMethodConfiguration
@@ -66,6 +77,17 @@ class BigqueryConfigurationFactory :
                    pojo.internalTableDataset!!
                },
            legacyRawTablesOnly = pojo.legacyRawTablesOnly ?: false,
+            defaultPartitioningField = pojo.defaultPartitioningField,
+            defaultClusteringField = pojo.defaultClusteringField,
+            defaultTableSuffix = pojo.defaultTableSuffix,
+            streamConfigMap = pojo.streams?.associate {
+                it.name to StreamLevelConfig(
+                    partitioningField = it.partitioningField,
+                    clusteringField = it.clusteringField,
+                    tableSuffix = it.tableSuffix,
+                    dataset = it.dataset
+                )
+            } ?: emptyMap(),
        )
    }
 }
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt
index 4571d4cac0a5..fced3139e661 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt
+++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt
@@ -105,8 +105,60 @@ class BigquerySpecification : ConfigurationSpecification() {
     @get:JsonProperty("raw_data_dataset")
     @get:JsonSchemaInject(json = """{"group": "advanced", "order": 8}""")
     val internalTableDataset: String? = null
+
+    @get:JsonSchemaTitle("Default Partitioning Field")
+    @get:JsonPropertyDescription("Default field to use for partitioning (e.g. _airbyte_extracted_at)")
+    @get:JsonProperty("default_partitioning_field")
+    @get:JsonSchemaInject(json = """{"group": "advanced", "order": 10}""")
+    val defaultPartitioningField: String? = null
+
+    @get:JsonSchemaTitle("Default Clustering Field")
+    @get:JsonPropertyDescription("Default field to use for clustering (e.g. _airbyte_extracted_at)")
+    @get:JsonProperty("default_clustering_field")
+    @get:JsonSchemaInject(json = """{"group": "advanced", "order": 11}""")
+    val defaultClusteringField: String? = null
+
+    @get:JsonSchemaTitle("Default Table Suffix")
+    @get:JsonPropertyDescription("Default suffix to append to table names")
+    @get:JsonProperty("default_table_suffix")
+    @get:JsonSchemaInject(json = """{"group": "advanced", "order": 12}""")
+    val defaultTableSuffix: String? = null
+
+    @get:JsonSchemaTitle("Stream Configuration")
+    @get:JsonPropertyDescription(
+        """Per-stream configuration overrides.""",
+    )
+    @get:JsonProperty("streams")
+    @get:JsonSchemaInject(json = """{"group": "advanced", "order": 13}""")
+    val streams: List<SingleStreamConfiguration>? = null
 }
 
+/**
+ * Per-stream configuration for custom partitioning, clustering, and table naming.
+ */
+data class SingleStreamConfiguration(
+    @get:JsonSchemaTitle("Stream Name")
+    @get:JsonPropertyDescription("Name of the stream (or namespace.stream_name)")
+    @JsonProperty("name")
+    val name: String = "",
+
+    @get:JsonSchemaTitle("Partitioning Field")
+    @JsonProperty("partitioning_field")
+    val partitioningField: String? = null,
+
+    @get:JsonSchemaTitle("Clustering Field")
+    @JsonProperty("clustering_field")
+    val clusteringField: String? = null,
+
+    @get:JsonSchemaTitle("Table Suffix")
+    @JsonProperty("table_suffix")
+    val tableSuffix: String? = null,
+
+    @get:JsonSchemaTitle("Target Dataset")
+    @JsonProperty("dataset")
+    val dataset: String? 
= null, +) + @JsonTypeInfo( use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXISTING_PROPERTY, diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt new file mode 100644 index 000000000000..3dbacc304219 --- /dev/null +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2026 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.bigquery.stream + +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration +import io.airbyte.integrations.destination.bigquery.spec.StreamLevelConfig +import jakarta.inject.Singleton + +/** + * Provides stream-level configuration by looking up the config map with fallback to defaults. + * + * Stream lookup order: + * 1. namespace.stream_name (if namespace is defined) + * 2. stream_name only + * 3. Falls back to connector-level defaults + */ +@Singleton +class StreamConfigProvider( + private val config: BigqueryConfiguration +) { + /** + * Get the stream-level configuration for a given stream descriptor. + * Returns null if no stream-specific config exists. + */ + fun getStreamConfig(descriptor: DestinationStream.Descriptor): StreamLevelConfig? { + // Try namespace.stream_name first + if (descriptor.namespace != null) { + val fullKey = "${descriptor.namespace}.${descriptor.name}" + config.streamConfigMap[fullKey]?.let { return it } + } + + // Try stream_name only + return config.streamConfigMap[descriptor.name] + } + + /** + * Get the effective clustering field for a stream. + * Priority: stream config > default config > null (use PK-based) + */ + fun getClusteringField(descriptor: DestinationStream.Descriptor): String? { + return getStreamConfig(descriptor)?.clusteringField + ?: config.defaultClusteringField + } + + /** + * Get the effective partitioning field for a stream. + * Priority: stream config > default config > "_airbyte_extracted_at" + */ + fun getPartitioningField(descriptor: DestinationStream.Descriptor): String { + return getStreamConfig(descriptor)?.partitioningField + ?: config.defaultPartitioningField + ?: "_airbyte_extracted_at" + } + + /** + * Get the effective table suffix for a stream. + * Priority: stream config > default config > "" (no suffix) + */ + fun getTableSuffix(descriptor: DestinationStream.Descriptor): String { + return getStreamConfig(descriptor)?.tableSuffix + ?: config.defaultTableSuffix + ?: "" + } + + /** + * Get the effective dataset for a stream. 
+ * Priority: stream config > stream namespace > default dataset + */ + fun getDataset(descriptor: DestinationStream.Descriptor): String { + return getStreamConfig(descriptor)?.dataset + ?: descriptor.namespace + ?: config.datasetId + } +} diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt index 73fac24659e4..3868d8134664 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt @@ -41,6 +41,7 @@ class BigqueryDirectLoadNativeTableOperations( private val databaseHandler: BigQueryDatabaseHandler, private val projectId: String, private val tempTableNameGenerator: TempTableNameGenerator, + private val streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ) : DirectLoadTableNativeOperations { override suspend fun ensureSchemaMatches( stream: DestinationStream, @@ -119,7 +120,7 @@ class BigqueryDirectLoadNativeTableOperations( var tablePartitioningMatches = false if (existingTable is StandardTableDefinition) { tableClusteringMatches = clusteringMatches(stream, columnNameMapping, existingTable) - tablePartitioningMatches = partitioningMatches(existingTable) + tablePartitioningMatches = partitioningMatches(stream, columnNameMapping, existingTable) } return !tableClusteringMatches || !tablePartitioningMatches } @@ -399,6 +400,7 @@ class BigqueryDirectLoadNativeTableOperations( stream: DestinationStream, columnNameMapping: ColumnNameMapping, existingTable: StandardTableDefinition, + streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ): Boolean { // We always want to set a clustering config, so if the table doesn't have one, // then we should fix it. 
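The lookup order above is easiest to see with concrete keys. Below is a minimal sketch of the same fallback chain, using plain maps and small data classes in place of the CDK's DestinationStream.Descriptor, BigqueryConfiguration, and StreamLevelConfig (the stand-in types and the example stream names are assumptions for illustration, not the connector's real classes):

// Stand-ins for the connector's config types (illustration only).
data class StreamOverride(val partitioningField: String? = null, val tableSuffix: String? = null)

data class Defaults(val partitioningField: String? = null)

// Same lookup order as StreamConfigProvider: "namespace.name" first, then "name", then defaults.
fun lookup(overrides: Map<String, StreamOverride>, namespace: String?, name: String): StreamOverride? =
    namespace?.let { overrides["$it.$name"] } ?: overrides[name]

fun partitioningField(
    overrides: Map<String, StreamOverride>,
    defaults: Defaults,
    namespace: String?,
    name: String,
): String =
    lookup(overrides, namespace, name)?.partitioningField
        ?: defaults.partitioningField
        ?: "_airbyte_extracted_at"

fun main() {
    val overrides = mapOf(
        "sales.orders" to StreamOverride(partitioningField = "created_at"),
        "orders" to StreamOverride(partitioningField = "updated_at"),
    )
    val defaults = Defaults(partitioningField = null)

    // Namespace-qualified key wins over the bare stream name.
    println(partitioningField(overrides, defaults, "sales", "orders"))   // created_at
    // No namespace: the bare key is used.
    println(partitioningField(overrides, defaults, null, "orders"))      // updated_at
    // No override at all: falls back to the built-in default.
    println(partitioningField(overrides, defaults, "sales", "refunds"))  // _airbyte_extracted_at
}

The namespace-qualified key always wins when both are present, which keeps a bare stream-name override from accidentally capturing same-named streams in other namespaces.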
@@ -407,13 +409,37 @@ class BigqueryDirectLoadNativeTableOperations( } val existingClusteringFields = HashSet(existingTable.clustering!!.fields) + + // Calculate expected clustering columns using the provider + val expectedClusteringColumns = mutableListOf() + val customClusteringField = streamConfigProvider.getClusteringField(stream.unmappedDescriptor) + + if (customClusteringField != null) { + val clusterFields = customClusteringField.split(",").map{ it.trim() }.filter { it.isNotEmpty() } + + for (field in clusterFields) { + val actualColumnName = columnNameMapping[field] + if (actualColumnName != null) { + expectedClusteringColumns.add(actualColumnName) + } + } + + if (expectedClusteringColumns.isEmpty()) { + // Fallback to default if no columns found (shouldn't happen if validation passes elsewhere) + expectedClusteringColumns.addAll(BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping)) + } + } else { + // No custom field, use default logic + expectedClusteringColumns.addAll(BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping)) + } + // We're OK with a column being in the clustering config that we don't expect // (e.g. user set a composite PK, then makes one of those fields no longer a PK). // It doesn't really hurt us to have that extra clustering config. val clusteringConfigIsSupersetOfExpectedConfig = containsAllIgnoreCase( existingClusteringFields, - BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping), + expectedClusteringColumns, ) // We do, however, validate that all the clustering fields actually exist in the // intended schema. @@ -429,12 +455,51 @@ class BigqueryDirectLoadNativeTableOperations( } @VisibleForTesting - fun partitioningMatches(existingTable: StandardTableDefinition): Boolean { + fun partitioningMatches( + stream: DestinationStream, + columnNameMapping: ColumnNameMapping, + existingTable: StandardTableDefinition, + streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + ): Boolean { + val expectedPartitionField = + resolvePartitioningField(stream, columnNameMapping, streamConfigProvider) return existingTable.timePartitioning != null && existingTable.timePartitioning!! 
.field - .equals("_airbyte_extracted_at", ignoreCase = true) && + .equals(expectedPartitionField, ignoreCase = true) && TimePartitioning.Type.DAY == existingTable.timePartitioning!!.type } + + private fun resolvePartitioningField( + stream: DestinationStream, + columnNameMapping: ColumnNameMapping, + streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + ): String { + val requestedField = streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) + if (requestedField == "_airbyte_extracted_at") { + return requestedField + } + + return columnNameMapping[requestedField] + ?: throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' does not exist in the schema" + ) + } + } + + private fun partitioningMatches( + stream: DestinationStream, + columnNameMapping: ColumnNameMapping, + existingTable: StandardTableDefinition, + ): Boolean { + return partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + } + + private fun clusteringMatches( + stream: DestinationStream, + columnNameMapping: ColumnNameMapping, + existingTable: StandardTableDefinition, + ): Boolean { + return clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider) } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt index f7cae6ac6a02..976dfba03a7d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt @@ -38,6 +38,7 @@ import org.apache.commons.lang3.StringUtils class BigqueryDirectLoadSqlGenerator( private val projectId: String?, private val cdcDeletionMode: CdcDeletionMode, + private val streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ) : DirectLoadSqlGenerator { override fun createTable( stream: DestinationStream, @@ -59,12 +60,17 @@ class BigqueryDirectLoadSqlGenerator( .joinToString(",\n") val columnDeclarations = columnsAndTypes(stream, columnNameMapping) + + // Use custom clustering if provided, else use PK-based clustering val clusterConfig = - clusteringColumns(stream, columnNameMapping) + getClusteringColumns(stream, columnNameMapping) .stream() .map { c: String? -> StringUtils.wrap(c, QUOTE) } .collect(Collectors.joining(", ")) + + val partitioningField = streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) val finalTableId = tableName.toPrettyString(QUOTE) + // bigquery has a CREATE OR REPLACE TABLE statement, but we can't use it // because you can't change a partitioning/clustering scheme in-place. // Bigquery requires you to drop+recreate the table in this case. 
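The clustering override is a comma-separated list of source field names; each entry is translated through the column-name mapping before being compared against the existing table's clustering and, in the SQL generator, before being emitted into CLUSTER BY. A rough sketch of that resolution step, with a plain Map standing in for the CDK's ColumnNameMapping and made-up field names (the real generator additionally rejects JSON-typed columns):

// Resolve a user-supplied clustering spec ("region, created_at") to destination column names,
// dropping fields that are not present in the mapping, and falling back to the default
// clustering columns when nothing resolves.
fun resolveClusteringColumns(
    customClusteringField: String?,
    columnNameMapping: Map<String, String>,
    defaultColumns: List<String>,
): List<String> {
    if (customClusteringField == null) return defaultColumns
    val resolved = customClusteringField
        .split(",")
        .map { it.trim() }
        .filter { it.isNotEmpty() }
        .mapNotNull { columnNameMapping[it] }
    return resolved.ifEmpty { defaultColumns }
}

fun main() {
    val mapping = mapOf("region" to "region", "created_at" to "created_at_utc")
    println(resolveClusteringColumns("region, created_at", mapping, listOf("_airbyte_extracted_at")))
    // [region, created_at_utc]
    println(resolveClusteringColumns("missing_field", mapping, listOf("_airbyte_extracted_at")))
    // [_airbyte_extracted_at]  (falls back to the defaults when nothing resolves)
}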
@@ -86,7 +92,7 @@ class BigqueryDirectLoadSqlGenerator( _airbyte_generation_id INTEGER, $columnDeclarations ) - PARTITION BY (DATE_TRUNC(_airbyte_extracted_at, DAY)) + PARTITION BY (DATE_TRUNC(`$partitioningField`, DAY)) CLUSTER BY $clusterConfig; """.trimIndent() ) @@ -348,4 +354,47 @@ class BigqueryDirectLoadSqlGenerator( return clusterColumns } } + + /** + * Get clustering columns with stream-level override support. + * Priority: + * 1. Stream-level clustering field from config + * 2. Default clustering field from connector config + * 3. PK-based clustering (for dedupe mode) + */ + private fun getClusteringColumns( + stream: DestinationStream, + columnNameMapping: ColumnNameMapping + ): List { + val customClusteringField = streamConfigProvider.getClusteringField(stream.unmappedDescriptor) + + if (customClusteringField != null) { + // Validate that the clustering field exists and is a valid type + val clusterFields = customClusteringField.split(",").map{ it.trim() }.filter { it.isNotEmpty() } + val resolvedColumns = mutableListOf() + + for (field in clusterFields) { + val actualColumnName = columnNameMapping[field] + if (actualColumnName != null) { + val fieldType = stream.schema.asColumns()[field] + if (fieldType != null) { + val bigqueryType = toDialectType(fieldType.type) + if (bigqueryType == StandardSQLTypeName.JSON) { + throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Clustering field '$field' is JSON type, which cannot be used for clustering" + ) + } + } + resolvedColumns.add(actualColumnName) + } + } + if (resolvedColumns.isNotEmpty()) { + return resolvedColumns + } + // If the fields don't exist in the schema, fall through to default behavior + } + + // Default behavior: use PK-based clustering + return clusteringColumns(stream, columnNameMapping) + } } From 104617bae1808e2f95436fc5478da26fe51209ff Mon Sep 17 00:00:00 2001 From: Pragyash Date: Mon, 26 Jan 2026 06:22:28 +0530 Subject: [PATCH 07/16] test(bigquery): Fix unit tests after DestinationCatalog refactor --- .../SafeDestinationCatalogFactoryTest.kt | 73 +++++++++++++++++++ ...ueryDirectLoadNativeTableOperationsTest.kt | 34 +++++++-- 2 files changed, 99 insertions(+), 8 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt new file mode 100644 index 000000000000..a8df69a1d85d --- /dev/null +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2026 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.bigquery + +import io.airbyte.cdk.load.command.Dedupe +import io.airbyte.cdk.load.command.NamespaceMapper +import io.airbyte.cdk.load.data.json.JsonSchemaToAirbyteType +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.airbyte.protocol.models.v0.SyncMode +import io.mockk.every +import io.mockk.mockk +import kotlin.test.assertEquals +import org.junit.jupiter.api.Assertions.assertDoesNotThrow +import org.junit.jupiter.api.Test +import java.util.Collections + +class SafeDestinationCatalogFactoryTest { + + @Test + fun `test syncCatalog with APPEND_DEDUP and null PK cursor does not throw NPE`() { + val factory = SafeDestinationCatalogFactory() + + val stream = ConfiguredAirbyteStream() + .withStream(AirbyteStream().withName("test").withNamespace("ns").withJsonSchema(mockk(relaxed = true))) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withSyncMode(SyncMode.INCREMENTAL) + .withPrimaryKey(null) + .withCursorField(null) + .withGenerationId(1L) + .withMinimumGenerationId(1L) + .withSyncId(1L) + + val catalog = ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(stream)) + val namespaceMapper = mockk(relaxed = true) + val jsonSchemaConverter = mockk(relaxed = true) + + // Mock the convert method + every { jsonSchemaConverter.convert(any()) } returns mockk(relaxed=true) + + assertDoesNotThrow { + val destCatalog = factory.syncCatalog( + catalog, + namespaceMapper, + jsonSchemaConverter + ) + val importType = destCatalog.streams.first().importType + assert(importType is Dedupe) + val dedupe = importType as Dedupe + assert(dedupe.primaryKey.isEmpty()) + assert(dedupe.cursor.isEmpty()) + } + } + + @Test + fun `test checkCatalog returns test stream`() { + val factory = SafeDestinationCatalogFactory() + val namespaceMapper = mockk(relaxed = true) + + val destCatalog = factory.checkCatalog( + namespaceMapper, + "custom_check_ns" + ) + + assertEquals(1, destCatalog.streams.size) + assertEquals("custom_check_ns", destCatalog.streams.first().unmappedNamespace) + assert(destCatalog.streams.first().unmappedName.startsWith("test")) + } +} diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt index 65e2eb6d851d..ddec331704e8 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt @@ -28,11 +28,15 @@ import io.airbyte.cdk.load.orchestration.db.direct_load_table.ColumnChange import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations import 
io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations.Companion.clusteringMatches import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations.Companion.partitioningMatches +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadSqlGenerator.Companion.toDialectType import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test import org.mockito.Mockito import org.mockito.Mockito.RETURNS_DEEP_STUBS +import org.mockito.kotlin.any +import org.mockito.kotlin.mock +import org.mockito.kotlin.whenever class BigqueryDirectLoadNativeTableOperationsTest { @Test @@ -73,6 +77,8 @@ class BigqueryDirectLoadNativeTableOperationsTest { val existingTable = Mockito.mock(StandardTableDefinition::class.java, RETURNS_DEEP_STUBS) Mockito.`when`(existingTable.schema!!.fields) .thenReturn(FieldList.of(Field.of("a2", StandardSQLTypeName.INT64))) + val streamConfigProvider = mock() + whenever(streamConfigProvider.getPartitioningField(any())).thenReturn(null) val alterTableReport = BigqueryDirectLoadNativeTableOperations( Mockito.mock(), @@ -80,6 +86,7 @@ class BigqueryDirectLoadNativeTableOperationsTest { Mockito.mock(), projectId = "unused", tempTableNameGenerator = DefaultTempTableNameGenerator("unused"), + streamConfigProvider = streamConfigProvider ) .buildAlterTableReport(stream, columnNameMapping, existingTable) Assertions.assertAll( @@ -130,6 +137,8 @@ class BigqueryDirectLoadNativeTableOperationsTest { ) ) ) + val streamConfigProvider = mock() + whenever(streamConfigProvider.getPartitioningField(any())).thenReturn(null) val alterTableReport = BigqueryDirectLoadNativeTableOperations( Mockito.mock(), @@ -137,6 +146,7 @@ class BigqueryDirectLoadNativeTableOperationsTest { Mockito.mock(), projectId = "unused", tempTableNameGenerator = DefaultTempTableNameGenerator("unused"), + streamConfigProvider = streamConfigProvider ) .buildAlterTableReport(stream, columnNameMapping, existingTable) // NB: column names in AlterTableReport are all _after_ destination name transform @@ -187,15 +197,18 @@ class BigqueryDirectLoadNativeTableOperationsTest { ) var columnNameMapping = ColumnNameMapping(mapOf("bar" to "foo")) + val streamConfigProvider = mock() + whenever(streamConfigProvider.getClusteringField(any())).thenReturn(null) + // Clustering is null val existingTable = Mockito.mock(StandardTableDefinition::class.java) Mockito.`when`(existingTable.clustering).thenReturn(null) - Assertions.assertFalse(clusteringMatches(stream, columnNameMapping, existingTable)) + Assertions.assertFalse(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) // Clustering does not contain all fields Mockito.`when`(existingTable.clustering) .thenReturn(Clustering.newBuilder().setFields(listOf("_airbyte_extracted_at")).build()) - Assertions.assertFalse(clusteringMatches(stream, columnNameMapping, existingTable)) + Assertions.assertFalse(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) // Clustering matches stream = @@ -209,7 +222,7 @@ class BigqueryDirectLoadNativeTableOperationsTest { syncId = 0, namespaceMapper = NamespaceMapper() ) - Assertions.assertTrue(clusteringMatches(stream, columnNameMapping, existingTable)) + Assertions.assertTrue(clusteringMatches(stream, columnNameMapping, 
existingTable, streamConfigProvider)) // Clustering only the first 3 PK columns (See // https://github.com/airbytehq/oncall/issues/2565) @@ -251,21 +264,26 @@ class BigqueryDirectLoadNativeTableOperationsTest { "e1" to "e2", ) ) - Assertions.assertTrue(clusteringMatches(stream, columnNameMapping, existingTable)) + Assertions.assertTrue(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) } @Test fun testPartitioningMatches() { val existingTable = Mockito.mock(StandardTableDefinition::class.java) + val stream = mock() + val columnNameMapping = mock() + val streamConfigProvider = mock() + whenever(streamConfigProvider.getPartitioningField(any())).thenReturn(null) + // Partitioning is null Mockito.`when`(existingTable.timePartitioning).thenReturn(null) - Assertions.assertFalse(partitioningMatches(existingTable)) + Assertions.assertFalse(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) // incorrect field Mockito.`when`(existingTable.timePartitioning) .thenReturn( TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("_foo").build() ) - Assertions.assertFalse(partitioningMatches(existingTable)) + Assertions.assertFalse(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) // incorrect partitioning scheme Mockito.`when`(existingTable.timePartitioning) .thenReturn( @@ -273,7 +291,7 @@ class BigqueryDirectLoadNativeTableOperationsTest { .setField("_airbyte_extracted_at") .build() ) - Assertions.assertFalse(partitioningMatches(existingTable)) + Assertions.assertFalse(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) // partitioning matches Mockito.`when`(existingTable.timePartitioning) @@ -282,6 +300,6 @@ class BigqueryDirectLoadNativeTableOperationsTest { .setField("_airbyte_extracted_at") .build() ) - Assertions.assertTrue(partitioningMatches(existingTable)) + Assertions.assertTrue(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) } } From 8ec00b53afd6fe913ec56911d010c90ac837970a Mon Sep 17 00:00:00 2001 From: Pragyash Date: Mon, 26 Jan 2026 06:27:57 +0530 Subject: [PATCH 08/16] fix(bigquery): Remove duplicate @Singleton from StreamConfigProvider --- .../destination/bigquery/stream/StreamConfigProvider.kt | 1 - 1 file changed, 1 deletion(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt index 3dbacc304219..f9638fef8166 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt @@ -17,7 +17,6 @@ import jakarta.inject.Singleton * 2. stream_name only * 3. 
Falls back to connector-level defaults */ -@Singleton class StreamConfigProvider( private val config: BigqueryConfiguration ) { From cc770eca4b390d6cd84cf254d8d4ae0b7b0cda89 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Mon, 26 Jan 2026 18:15:26 +0530 Subject: [PATCH 09/16] feat: patch BQ connector with custom partitioning logic --- .../bigquery/BigqueryBeansFactory.kt | 50 +++++++++++++++++-- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt index 7ab37d94ca01..b94e457fb355 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt @@ -38,7 +38,13 @@ import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadSqlTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.legacy_raw_tables.BigqueryRawTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.legacy_raw_tables.BigqueryTypingDedupingDatabaseInitialStatusGatherer -import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider // ADDED +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider +import io.airbyte.cdk.load.orchestration.db.RawTableNameGenerator +import io.airbyte.cdk.load.orchestration.db.FinalTableNameGenerator +import io.airbyte.cdk.load.orchestration.db.ColumnNameGenerator +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryRawTableNameGenerator +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryFinalTableNameGenerator +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryColumnNameGenerator import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Requires @@ -84,7 +90,7 @@ class BigqueryBeansFactory { @Singleton fun getChecker( - catalog: DestinationCatalog, + @Named("safeDestinationCatalog") catalog: DestinationCatalog, @Named("inputStream") stdinPipe: InputStream, taskLauncher: DestinationTaskLauncher, syncManager: SyncManager, @@ -96,23 +102,58 @@ class BigqueryBeansFactory { BigqueryCheckCleaner(), ) + @Singleton + fun getRawTableNameGenerator( + config: BigqueryConfiguration, + streamConfigProvider: StreamConfigProvider + ): RawTableNameGenerator { + return BigqueryRawTableNameGenerator(config, streamConfigProvider) + } + + @Singleton + fun getFinalTableNameGenerator( + config: BigqueryConfiguration, + streamConfigProvider: StreamConfigProvider + ): FinalTableNameGenerator { + return BigqueryFinalTableNameGenerator(config, streamConfigProvider) + } + + @Singleton + fun getColumnNameGenerator(): ColumnNameGenerator { + return BigqueryColumnNameGenerator() + } + @Singleton fun getWriter( bigquery: BigQuery, config: BigqueryConfiguration, - names: TableCatalog, // micronaut will only instantiate a single instance of StreamStateStore, // so accept it as a * generic and cast as needed. 
// we use a different type depending on whether we're in legacy raw tables vs // direct-load tables mode. streamStateStore: StreamStateStore<*>, streamConfigProvider: StreamConfigProvider, + names: TableCatalog, ): DestinationWriter { val destinationHandler = BigQueryDatabaseHandler(bigquery, config.datasetLocation.region) + // TableCatalog is still injected here; it is assembled from the RawTableNameGenerator, + // FinalTableNameGenerator, and ColumnNameGenerator beans defined above, so it already + // reflects the custom stream-level naming and needs no extra wiring in this method. + if (config.legacyRawTablesOnly) { // force smart cast @Suppress("UNCHECKED_CAST") streamStateStore as StreamStateStore + return TypingDedupingWriter( names, BigqueryTypingDedupingDatabaseInitialStatusGatherer(bigquery), @@ -141,8 +182,7 @@ class BigqueryBeansFactory { // force smart cast @Suppress("UNCHECKED_CAST") streamStateStore as StreamStateStore - val tempTableNameGenerator = - DefaultTempTableNameGenerator(internalNamespace = config.internalTableDataset) + val tempTableNameGenerator = DefaultTempTableNameGenerator(internalNamespace = config.internalTableDataset) return DirectLoadTableWriter( internalNamespace = config.internalTableDataset, From 1f0a342e170b417e4644a43137450185f70ddb88 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Mon, 26 Jan 2026 18:16:06 +0530 Subject: [PATCH 10/16] chore: add remaining modified files for custom BQ logic --- .../destination/bigquery/BigQueryUtils.kt | 2 +- .../bigquery/SafeDestinationCatalogFactory.kt | 118 ++++++++++-------- .../bigquery/check/BigqueryCheckCleaner.kt | 6 +- .../typing_deduping/BigqueryNameGenerators.kt | 35 +++--- 4 files changed, 89 insertions(+), 72 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigQueryUtils.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigQueryUtils.kt index 7a473c55d48e..4c719bb0dd17 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigQueryUtils.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigQueryUtils.kt @@ -174,7 +174,7 @@ object BigQueryUtils { private val connectorNameOrDefault: String get() = Optional.ofNullable(System.getenv("WORKER_CONNECTOR_IMAGE")) - .map { name: String -> name.replace("airbyte/", "").replace(":", "/") } + .map { name: String -> name.trim().replace("airbyte/", "").replace(":", "/") } .orElse("destination-bigquery") } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt index 
81c006b56955..91277665e6c0 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt @@ -22,7 +22,9 @@ import io.airbyte.cdk.load.data.json.JsonSchemaToAirbyteType import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog import io.airbyte.protocol.models.v0.DestinationSyncMode import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Primary import io.micronaut.context.annotation.Replaces +import io.micronaut.context.annotation.Requires import io.micronaut.context.annotation.Value import jakarta.inject.Named import jakarta.inject.Singleton @@ -33,65 +35,73 @@ import org.apache.commons.lang3.RandomStringUtils @Factory @Replaces(DefaultDestinationCatalogFactory::class) class SafeDestinationCatalogFactory { + @Requires(property = Operation.PROPERTY, notEquals = "check") @Singleton - fun getDestinationCatalog( + @Primary + @Named("safeDestinationCatalog") + fun syncCatalog( catalog: ConfiguredAirbyteCatalog, namespaceMapper: NamespaceMapper, jsonSchemaToAirbyteType: JsonSchemaToAirbyteType, - @Value("\${${Operation.PROPERTY}}") operation: String, - @Named("checkNamespace") checkNamespace: String?, ): DestinationCatalog { - if (operation == "check") { - // Copied from DefaultDestinationCatalogFactory to maintain behavior - val date = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyyMMdd")) - val random = RandomStringUtils.randomAlphabetic(5).lowercase() - val namespace = checkNamespace ?: "${CHECK_STREAM_NAMESPACE}_$date$random" - return DestinationCatalog( - listOf( - DestinationStream( - unmappedNamespace = namespace, - unmappedName = "test$date$random", - importType = Append, - schema = - ObjectType( - linkedMapOf("test" to FieldType(IntegerType, nullable = true)) - ), - generationId = 1, - minimumGenerationId = 0, - syncId = 1, - namespaceMapper = namespaceMapper - ) - ) - ) - } else { - val streams = - catalog.streams.map { stream -> - val importType = - when (stream.destinationSyncMode) { - null -> throw IllegalArgumentException("Destination sync mode was null") - DestinationSyncMode.OVERWRITE -> Overwrite - DestinationSyncMode.APPEND -> Append - DestinationSyncMode.APPEND_DEDUP -> - Dedupe( - primaryKey = stream.primaryKey ?: emptyList(), - cursor = stream.cursorField ?: emptyList() - ) - DestinationSyncMode.UPDATE -> Update - DestinationSyncMode.SOFT_DELETE -> SoftDelete - } + val streams = + catalog.streams.map { stream -> + val importType = + when (stream.destinationSyncMode) { + null -> throw IllegalArgumentException("Destination sync mode was null") + DestinationSyncMode.OVERWRITE -> Overwrite + DestinationSyncMode.APPEND -> Append + DestinationSyncMode.APPEND_DEDUP -> + Dedupe( + primaryKey = stream.primaryKey ?: emptyList(), + cursor = stream.cursorField ?: emptyList() + ) + DestinationSyncMode.UPDATE -> Update + DestinationSyncMode.SOFT_DELETE -> SoftDelete + } + + DestinationStream( + unmappedName = stream.stream.name, + unmappedNamespace = stream.stream.namespace, + importType = importType, + schema = jsonSchemaToAirbyteType.convert(stream.stream.jsonSchema), + generationId = stream.generationId ?: 0, + minimumGenerationId = stream.minimumGenerationId ?: 0, + syncId = stream.syncId ?: 0, + namespaceMapper = namespaceMapper, + ) + } + return DestinationCatalog(streams) + } - 
DestinationStream( - unmappedName = stream.stream.name, - unmappedNamespace = stream.stream.namespace, - importType = importType, - schema = jsonSchemaToAirbyteType.convert(stream.stream.jsonSchema), - generationId = stream.generationId ?: 0, - minimumGenerationId = stream.minimumGenerationId ?: 0, - syncId = stream.syncId ?: 0, - namespaceMapper = namespaceMapper, - ) - } - return DestinationCatalog(streams) - } + @Requires(property = Operation.PROPERTY, value = "check") + @Singleton + @Primary + @Named("safeDestinationCatalog") + fun checkCatalog( + namespaceMapper: NamespaceMapper, + @Named("checkNamespace") checkNamespace: String?, + ): DestinationCatalog { + // Copied from DefaultDestinationCatalogFactory to maintain behavior + val date = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyyMMdd")) + val random = RandomStringUtils.randomAlphabetic(5).lowercase() + val namespace = checkNamespace ?: "${CHECK_STREAM_NAMESPACE}_$date$random" + return DestinationCatalog( + listOf( + DestinationStream( + unmappedNamespace = namespace, + unmappedName = "test$date$random", + importType = Append, + schema = + ObjectType( + linkedMapOf("test" to FieldType(IntegerType, nullable = true)) + ), + generationId = 1, + minimumGenerationId = 0, + syncId = 1, + namespaceMapper = namespaceMapper + ) + ) + ) } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt index 8a0843eb8d94..6d8c4452a6f5 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt @@ -10,19 +10,21 @@ import io.airbyte.integrations.destination.bigquery.BigqueryBeansFactory import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryFinalTableNameGenerator import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryRawTableNameGenerator +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.toTableId class BigqueryCheckCleaner : CheckCleaner { override fun cleanup(config: BigqueryConfiguration, stream: DestinationStream) { + val streamConfigProvider = StreamConfigProvider(config) val bq = BigqueryBeansFactory().getBigqueryClient(config) bq.getTable( - BigqueryRawTableNameGenerator(config) + BigqueryRawTableNameGenerator(config, streamConfigProvider) .getTableName(stream.mappedDescriptor) .toTableId() ) ?.delete() bq.getTable( - BigqueryFinalTableNameGenerator(config) + BigqueryFinalTableNameGenerator(config, streamConfigProvider) .getTableName(stream.mappedDescriptor) .toTableId() ) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt index 4aca53ed3f5e..61c5743bf6a1 100644 --- 
a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt @@ -1,7 +1,3 @@ -/* - * Copyright (c) 2026 Airbyte, Inc., all rights reserved. - */ - package io.airbyte.integrations.destination.bigquery.write.typing_deduping import com.google.cloud.bigquery.TableId @@ -13,32 +9,41 @@ import io.airbyte.cdk.load.orchestration.db.TableName import io.airbyte.cdk.load.orchestration.db.legacy_typing_deduping.TypingDedupingUtil import io.airbyte.integrations.destination.bigquery.BigQuerySQLNameTransformer import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import java.util.Locale import javax.inject.Singleton private val nameTransformer = BigQuerySQLNameTransformer() -@Singleton -class BigqueryRawTableNameGenerator(val config: BigqueryConfiguration) : RawTableNameGenerator { - override fun getTableName(streamDescriptor: DestinationStream.Descriptor) = - TableName( +class BigqueryRawTableNameGenerator( + val config: BigqueryConfiguration, + val streamConfigProvider: StreamConfigProvider +) : RawTableNameGenerator { + override fun getTableName(streamDescriptor: DestinationStream.Descriptor): TableName { + val suffix = streamConfigProvider.getTableSuffix(streamDescriptor) + return TableName( nameTransformer.getNamespace(config.internalTableDataset), nameTransformer.convertStreamName( TypingDedupingUtil.concatenateRawTableName( streamDescriptor.namespace ?: config.datasetId, streamDescriptor.name, - ) + ) + suffix ), ) + } } -@Singleton -class BigqueryFinalTableNameGenerator(val config: BigqueryConfiguration) : FinalTableNameGenerator { - override fun getTableName(streamDescriptor: DestinationStream.Descriptor) = - TableName( - nameTransformer.getNamespace(streamDescriptor.namespace ?: config.datasetId), - nameTransformer.convertStreamName(streamDescriptor.name), +class BigqueryFinalTableNameGenerator( + val config: BigqueryConfiguration, + val streamConfigProvider: StreamConfigProvider +) : FinalTableNameGenerator { + override fun getTableName(streamDescriptor: DestinationStream.Descriptor): TableName { + val suffix = streamConfigProvider.getTableSuffix(streamDescriptor) + return TableName( + nameTransformer.getNamespace(streamConfigProvider.getDataset(streamDescriptor)), + nameTransformer.convertStreamName(streamDescriptor.name + suffix), ) + } } @Singleton From 493a382aabeb2ba8588efcfc72ae8d95c9488858 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Mon, 2 Feb 2026 14:40:21 +0530 Subject: [PATCH 11/16] Add support for DAY/MONTH/YEAR based partitioning --- .../bigquery/spec/BigqueryConfiguration.kt | 4 ++ .../bigquery/spec/BigquerySpecification.kt | 24 ++++++++-- .../bigquery/stream/StreamConfigProvider.kt | 11 +++++ ...BigqueryDirectLoadNativeTableOperations.kt | 47 +++++++++++++++---- .../BigqueryDirectLoadSqlGenerator.kt | 40 +++++++++++++++- 5 files changed, 112 insertions(+), 14 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt index ecc531163d2c..b9afb35750a5 100644 
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt @@ -22,6 +22,7 @@ data class BigqueryConfiguration( val defaultPartitioningField: String?, val defaultClusteringField: String?, val defaultTableSuffix: String?, + val defaultPartitioningGranularity: PartitioningGranularity?, val streamConfigMap: Map, ) : DestinationConfiguration() { override val numOpenStreamWorkers = 3 @@ -32,6 +33,7 @@ data class BigqueryConfiguration( data class StreamLevelConfig( val partitioningField: String? = null, + val partitioningGranularity: PartitioningGranularity? = null, val clusteringField: String? = null, val tableSuffix: String? = null, val dataset: String? = null, @@ -80,9 +82,11 @@ class BigqueryConfigurationFactory : defaultPartitioningField = pojo.defaultPartitioningField, defaultClusteringField = pojo.defaultClusteringField, defaultTableSuffix = pojo.defaultTableSuffix, + defaultPartitioningGranularity = pojo.defaultPartitioningGranularity, streamConfigMap = pojo.streams?.associate { it.name to StreamLevelConfig( partitioningField = it.partitioningField, + partitioningGranularity = it.partitioningGranularity, clusteringField = it.clusteringField, tableSuffix = it.tableSuffix, dataset = it.dataset diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt index fced3139e661..55b5161b9cb3 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt @@ -112,16 +112,22 @@ class BigquerySpecification : ConfigurationSpecification() { @get:JsonSchemaInject(json = """{"group": "advanced", "order": 10}""") val defaultPartitioningField: String? = null + @get:JsonSchemaTitle("Default Partitioning Granularity") + @get:JsonPropertyDescription("Default partitioning granularity: DAY, MONTH, or YEAR. Defaults to DAY.") + @get:JsonProperty("default_partitioning_granularity") + @get:JsonSchemaInject(json = """{"group": "advanced", "order": 11}""") + val defaultPartitioningGranularity: PartitioningGranularity? = null + @get:JsonSchemaTitle("Default Clustering Field") @get:JsonPropertyDescription("Default field to use for clustering (e.g. _airbyte_extracted_at)") @get:JsonProperty("default_clustering_field") - @get:JsonSchemaInject(json = """{"group": "advanced", "order": 11}""") + @get:JsonSchemaInject(json = """{"group": "advanced", "order": 12}""") val defaultClusteringField: String? = null @get:JsonSchemaTitle("Default Table Suffix") @get:JsonPropertyDescription("Default suffix to append to table names") @get:JsonProperty("default_table_suffix") - @get:JsonSchemaInject(json = """{"group": "advanced", "order": 12}""") + @get:JsonSchemaInject(json = """{"group": "advanced", "order": 13}""") val defaultTableSuffix: String? 
= null @get:JsonSchemaTitle("Stream Configuration") @@ -129,7 +135,7 @@ class BigquerySpecification : ConfigurationSpecification() { """Per-stream configuration overrides.""", ) @get:JsonProperty("streams") - @get:JsonSchemaInject(json = """{"group": "advanced", "order": 13}""") + @get:JsonSchemaInject(json = """{"group": "advanced", "order": 14}""") val streams: List? = null } @@ -146,6 +152,12 @@ data class SingleStreamConfiguration( @JsonProperty("partitioning_field") val partitioningField: String? = null, + @get:JsonSchemaTitle("Partitioning Granularity") + @get:JsonPropertyDescription( + "Partitioning granularity for the partitioning field. Allowed values: DAY, MONTH, YEAR. Defaults to DAY.") + @JsonProperty("partitioning_granularity") + val partitioningGranularity: PartitioningGranularity? = null, + @get:JsonSchemaTitle("Clustering Field") @JsonProperty("clustering_field") val clusteringField: String? = null, @@ -266,6 +278,12 @@ enum class CdcDeletionMode(@get:JsonValue val cdcDeletionMode: String) { SOFT_DELETE("Soft delete"), } +enum class PartitioningGranularity(@get:JsonValue val granularity: String) { + DAY("DAY"), + MONTH("MONTH"), + YEAR("YEAR"), +} + @Singleton class BigquerySpecificationExtension : DestinationSpecificationExtension { override val supportedSyncModes = diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt index f9638fef8166..a7d85c0ba364 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt @@ -7,6 +7,7 @@ package io.airbyte.integrations.destination.bigquery.stream import io.airbyte.cdk.load.command.DestinationStream import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration import io.airbyte.integrations.destination.bigquery.spec.StreamLevelConfig +import io.airbyte.integrations.destination.bigquery.spec.PartitioningGranularity import jakarta.inject.Singleton /** @@ -53,6 +54,16 @@ class StreamConfigProvider( ?: config.defaultPartitioningField ?: "_airbyte_extracted_at" } + + /** + * Get the effective partitioning granularity for a stream. + * Priority: stream config > default config > DAY + */ + fun getPartitioningGranularity(descriptor: DestinationStream.Descriptor): PartitioningGranularity { + return getStreamConfig(descriptor)?.partitioningGranularity + ?: config.defaultPartitioningGranularity + ?: PartitioningGranularity.DAY + } /** * Get the effective table suffix for a stream. 
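Granularity resolves like the other per-stream settings: the stream override wins, then the connector default, then DAY. The sketch below mirrors that chain with a stand-in enum and shows the partition clause it would produce for a TIMESTAMP column; the column name is made up, and the choice between DATE_TRUNC, TIMESTAMP_TRUNC, and DATETIME_TRUNC by column type is handled in the SQL generator changes later in this patch:

enum class Granularity { DAY, MONTH, YEAR }

// Stream override > connector default > DAY, mirroring getPartitioningGranularity.
fun resolveGranularity(streamOverride: Granularity?, connectorDefault: Granularity?): Granularity =
    streamOverride ?: connectorDefault ?: Granularity.DAY

// Partition clause for a TIMESTAMP column (illustrative column name).
fun timestampPartitionClause(column: String, granularity: Granularity): String =
    "PARTITION BY (TIMESTAMP_TRUNC(`$column`, ${granularity.name}))"

fun main() {
    val g = resolveGranularity(streamOverride = null, connectorDefault = Granularity.MONTH)
    println(g)                                          // MONTH
    println(timestampPartitionClause("created_at", g))  // PARTITION BY (TIMESTAMP_TRUNC(`created_at`, MONTH))
    println(resolveGranularity(null, null))             // DAY
}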
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt index 3868d8134664..eb1bf32b454e 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt @@ -27,6 +27,7 @@ import io.airbyte.cdk.util.CollectionUtils.containsAllIgnoreCase import io.airbyte.cdk.util.containsIgnoreCase import io.airbyte.cdk.util.findIgnoreCase import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigQueryDatabaseHandler +import io.airbyte.integrations.destination.bigquery.spec.PartitioningGranularity import io.airbyte.integrations.destination.bigquery.write.typing_deduping.toTableId import io.github.oshai.kotlinlogging.KotlinLogging import kotlinx.coroutines.runBlocking @@ -461,29 +462,57 @@ class BigqueryDirectLoadNativeTableOperations( existingTable: StandardTableDefinition, streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ): Boolean { - val expectedPartitionField = + val (expectedPartitionField, expectedPartitionType) = resolvePartitioningField(stream, columnNameMapping, streamConfigProvider) return existingTable.timePartitioning != null && existingTable.timePartitioning!! 
.field .equals(expectedPartitionField, ignoreCase = true) && - TimePartitioning.Type.DAY == existingTable.timePartitioning!!.type + expectedPartitionType == existingTable.timePartitioning!!.type } private fun resolvePartitioningField( stream: DestinationStream, columnNameMapping: ColumnNameMapping, streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, - ): String { + ): Pair { val requestedField = streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) - if (requestedField == "_airbyte_extracted_at") { - return requestedField - } + val granularity = streamConfigProvider.getPartitioningGranularity(stream.unmappedDescriptor) + val expectedPartitionType = + when (granularity) { + PartitioningGranularity.DAY -> TimePartitioning.Type.DAY + PartitioningGranularity.MONTH -> TimePartitioning.Type.MONTH + PartitioningGranularity.YEAR -> TimePartitioning.Type.YEAR + } - return columnNameMapping[requestedField] - ?: throw ConfigErrorException( - "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' does not exist in the schema" + val (mappedField, fieldType) = + if (requestedField == "_airbyte_extracted_at") { + "_airbyte_extracted_at" to StandardSQLTypeName.TIMESTAMP + } else { + val mappedName = + columnNameMapping[requestedField] + ?: throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' does not exist in the schema" + ) + val schemaField = + stream.schema.asColumns()[requestedField] + ?: throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' does not exist in the schema" + ) + mappedName to BigqueryDirectLoadSqlGenerator.toDialectType(schemaField.type) + } + + if ( + fieldType != StandardSQLTypeName.DATE && + fieldType != StandardSQLTypeName.TIMESTAMP && + fieldType != StandardSQLTypeName.DATETIME + ) { + throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' must be DATE, TIMESTAMP, or DATETIME" ) + } + + return mappedField to expectedPartitionType } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt index 976dfba03a7d..8d718d854c8d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt @@ -68,7 +68,7 @@ class BigqueryDirectLoadSqlGenerator( .map { c: String? 
-> StringUtils.wrap(c, QUOTE) } .collect(Collectors.joining(", ")) - val partitioningField = streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) + val partitioningExpression = resolvePartitioningExpression(stream, columnNameMapping) val finalTableId = tableName.toPrettyString(QUOTE) // bigquery has a CREATE OR REPLACE TABLE statement, but we can't use it @@ -92,7 +92,7 @@ class BigqueryDirectLoadSqlGenerator( _airbyte_generation_id INTEGER, $columnDeclarations ) - PARTITION BY (DATE_TRUNC(`$partitioningField`, DAY)) + PARTITION BY ($partitioningExpression) CLUSTER BY $clusterConfig; """.trimIndent() ) @@ -397,4 +397,40 @@ class BigqueryDirectLoadSqlGenerator( // Default behavior: use PK-based clustering return clusteringColumns(stream, columnNameMapping) } + + private fun resolvePartitioningExpression( + stream: DestinationStream, + columnNameMapping: ColumnNameMapping, + ): String { + val requestedField = streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) + val granularity = streamConfigProvider.getPartitioningGranularity(stream.unmappedDescriptor) + + val (sqlField, fieldType) = + if (requestedField == "_airbyte_extracted_at") { + "_airbyte_extracted_at" to StandardSQLTypeName.TIMESTAMP + } else { + val mappedName = + columnNameMapping[requestedField] + ?: throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' does not exist in the schema" + ) + val schemaField = + stream.schema.asColumns()[requestedField] + ?: throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' does not exist in the schema" + ) + mappedName to toDialectType(schemaField.type) + } + + val granularityLiteral = granularity.name + return when (fieldType) { + StandardSQLTypeName.DATE -> "DATE_TRUNC(`$sqlField`, $granularityLiteral)" + StandardSQLTypeName.TIMESTAMP -> "TIMESTAMP_TRUNC(`$sqlField`, $granularityLiteral)" + StandardSQLTypeName.DATETIME -> "DATETIME_TRUNC(`$sqlField`, $granularityLiteral)" + else -> + throw ConfigErrorException( + "Stream ${stream.mappedDescriptor.toPrettyString()}: Partitioning field '$requestedField' must be DATE, TIMESTAMP, or DATETIME for $granularityLiteral partitioning" + ) + } + } } From 104ed4ef6b6a85f5951eb12ebeab0e5487920566 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Thu, 5 Feb 2026 10:44:16 +0530 Subject: [PATCH 12/16] Fix BQ partitioning test defaults --- ...ueryDirectLoadNativeTableOperationsTest.kt | 40 +++++++++++++++++-- 1 file changed, 36 insertions(+), 4 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt index ddec331704e8..8120753a38cc 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt @@ -25,6 +25,11 @@ import io.airbyte.cdk.load.orchestration.db.ColumnNameMapping import 
io.airbyte.cdk.load.orchestration.db.DefaultTempTableNameGenerator import io.airbyte.cdk.load.orchestration.db.direct_load_table.ColumnAdd import io.airbyte.cdk.load.orchestration.db.direct_load_table.ColumnChange +import io.airbyte.integrations.destination.bigquery.spec.BatchedStandardInsertConfiguration +import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration +import io.airbyte.integrations.destination.bigquery.spec.BigqueryRegion +import io.airbyte.integrations.destination.bigquery.spec.CdcDeletionMode +import io.airbyte.integrations.destination.bigquery.spec.PartitioningGranularity import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations.Companion.clusteringMatches import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations.Companion.partitioningMatches @@ -35,6 +40,7 @@ import org.junit.jupiter.api.Test import org.mockito.Mockito import org.mockito.Mockito.RETURNS_DEEP_STUBS import org.mockito.kotlin.any +import org.mockito.kotlin.doReturn import org.mockito.kotlin.mock import org.mockito.kotlin.whenever @@ -270,10 +276,36 @@ class BigqueryDirectLoadNativeTableOperationsTest { @Test fun testPartitioningMatches() { val existingTable = Mockito.mock(StandardTableDefinition::class.java) - val stream = mock() - val columnNameMapping = mock() - val streamConfigProvider = mock() - whenever(streamConfigProvider.getPartitioningField(any())).thenReturn(null) + val stream = + DestinationStream( + "foo", + "bar", + Append, + ObjectType(linkedMapOf()), + generationId = 0, + minimumGenerationId = 0, + syncId = 0, + namespaceMapper = NamespaceMapper() + ) + val columnNameMapping = ColumnNameMapping(mapOf()) + val streamConfigProvider = + StreamConfigProvider( + BigqueryConfiguration( + projectId = "unused", + datasetLocation = BigqueryRegion.US, + datasetId = "unused_dataset", + loadingMethod = BatchedStandardInsertConfiguration, + credentialsJson = null, + cdcDeletionMode = CdcDeletionMode.HARD_DELETE, + internalTableDataset = "unused_internal", + legacyRawTablesOnly = false, + defaultPartitioningField = null, + defaultClusteringField = null, + defaultTableSuffix = null, + defaultPartitioningGranularity = PartitioningGranularity.DAY, + streamConfigMap = emptyMap(), + ) + ) // Partitioning is null Mockito.`when`(existingTable.timePartitioning).thenReturn(null) From 156322c903ad311aeae19845fd27a028162ce466 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Sat, 7 Feb 2026 13:20:39 +0530 Subject: [PATCH 13/16] Fix destination-bigquery CI failures --- .../destination-bigquery/gradle.properties | 2 +- .../typing_deduping/BigqueryNameGenerators.kt | 4 ++ .../bigquery/BigqueryDataDumper.kt | 11 +++- .../SafeDestinationCatalogFactoryTest.kt | 50 +++++++++---------- ...ueryDirectLoadNativeTableOperationsTest.kt | 35 +++++++++---- 5 files changed, 63 insertions(+), 39 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/gradle.properties b/airbyte-integrations/connectors/destination-bigquery/gradle.properties index b3a770fa0b0e..421d0854f420 100644 --- a/airbyte-integrations/connectors/destination-bigquery/gradle.properties +++ b/airbyte-integrations/connectors/destination-bigquery/gradle.properties @@ -1,3 +1,3 @@ testExecutionConcurrency=-1 JunitMethodExecutionTimeout=10m 
-cdkVersion=0.2.0 +cdkVersion=0.2.8 diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt index 61c5743bf6a1..7cbe080378d3 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/BigqueryNameGenerators.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2026 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.bigquery.write.typing_deduping import com.google.cloud.bigquery.TableId diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt index b77c78cab78a..0aaf712de0da 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt @@ -30,6 +30,7 @@ import io.airbyte.cdk.load.util.Jsons import io.airbyte.cdk.load.util.deserializeToNode import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfigurationFactory import io.airbyte.integrations.destination.bigquery.spec.BigquerySpecification +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryFinalTableNameGenerator import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryRawTableNameGenerator import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange @@ -47,9 +48,12 @@ object BigqueryRawTableDataDumper : DestinationDataDumper { ): List { val config = BigqueryConfigurationFactory().make(spec as BigquerySpecification) val bigquery = BigqueryBeansFactory().getBigqueryClient(config) + val streamConfigProvider = StreamConfigProvider(config) val (_, rawTableName) = - BigqueryRawTableNameGenerator(config).getTableName(stream.mappedDescriptor) + BigqueryRawTableNameGenerator(config, streamConfigProvider).getTableName( + stream.mappedDescriptor + ) return bigquery.getTable(TableId.of(config.internalTableDataset, rawTableName))?.let { table -> @@ -94,9 +98,12 @@ object BigqueryFinalTableDataDumper : DestinationDataDumper { ): List { val config = BigqueryConfigurationFactory().make(spec as BigquerySpecification) val bigquery = BigqueryBeansFactory().getBigqueryClient(config) + val streamConfigProvider = StreamConfigProvider(config) val (datasetName, finalTableName) = - BigqueryFinalTableNameGenerator(config).getTableName(stream.mappedDescriptor) + BigqueryFinalTableNameGenerator(config, streamConfigProvider).getTableName( + stream.mappedDescriptor + ) return bigquery.getTable(TableId.of(datasetName, finalTableName))?.let { table -> val bigquerySchema = table.getDefinition().schema!! 
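With the name generators and both data dumpers now going through StreamConfigProvider, a stream's final table location is a function of the resolved dataset (stream-level dataset, then the stream namespace, then the connector's dataset) and the stream name plus any configured suffix. A rough sketch of that resolution, using a simplified sanitizer in place of the connector's BigQuerySQLNameTransformer (the sanitizer and the example names are assumptions for illustration):

data class TableRef(val dataset: String, val name: String)

// Simplified stand-in for the connector's name transformer (illustration only).
fun sanitize(identifier: String): String =
    identifier.lowercase().replace(Regex("[^a-z0-9_]"), "_")

fun finalTableName(
    streamNamespace: String?,
    streamName: String,
    datasetOverride: String?,
    connectorDataset: String,
    tableSuffix: String,
): TableRef {
    // Dataset: stream-level override > stream namespace > connector default.
    val dataset = datasetOverride ?: streamNamespace ?: connectorDataset
    // Table name: stream name plus the resolved suffix, run through the name transform.
    return TableRef(sanitize(dataset), sanitize(streamName + tableSuffix))
}

fun main() {
    // Suffix and dataset override applied.
    println(finalTableName("sales", "Orders", "reporting", "analytics", "_v2"))
    // TableRef(dataset=reporting, name=orders_v2)
    // No overrides: falls back to the stream namespace, no suffix.
    println(finalTableName("sales", "Orders", null, "analytics", ""))
    // TableRef(dataset=sales, name=orders)
}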
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt index a8df69a1d85d..f85fb3dbf511 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactoryTest.kt @@ -14,40 +14,41 @@ import io.airbyte.protocol.models.v0.DestinationSyncMode import io.airbyte.protocol.models.v0.SyncMode import io.mockk.every import io.mockk.mockk +import java.util.Collections import kotlin.test.assertEquals import org.junit.jupiter.api.Assertions.assertDoesNotThrow import org.junit.jupiter.api.Test -import java.util.Collections class SafeDestinationCatalogFactoryTest { - @Test fun `test syncCatalog with APPEND_DEDUP and null PK cursor does not throw NPE`() { val factory = SafeDestinationCatalogFactory() - - val stream = ConfiguredAirbyteStream() - .withStream(AirbyteStream().withName("test").withNamespace("ns").withJsonSchema(mockk(relaxed = true))) - .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .withSyncMode(SyncMode.INCREMENTAL) - .withPrimaryKey(null) - .withCursorField(null) - .withGenerationId(1L) - .withMinimumGenerationId(1L) - .withSyncId(1L) - + + val stream = + ConfiguredAirbyteStream() + .withStream( + AirbyteStream() + .withName("test") + .withNamespace("ns") + .withJsonSchema(mockk(relaxed = true)) + ) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withSyncMode(SyncMode.INCREMENTAL) + .withPrimaryKey(null) + .withCursorField(null) + .withGenerationId(1L) + .withMinimumGenerationId(1L) + .withSyncId(1L) + val catalog = ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(stream)) val namespaceMapper = mockk(relaxed = true) val jsonSchemaConverter = mockk(relaxed = true) - + // Mock the convert method - every { jsonSchemaConverter.convert(any()) } returns mockk(relaxed=true) + every { jsonSchemaConverter.convert(any()) } returns mockk(relaxed = true) assertDoesNotThrow { - val destCatalog = factory.syncCatalog( - catalog, - namespaceMapper, - jsonSchemaConverter - ) + val destCatalog = factory.syncCatalog(catalog, namespaceMapper, jsonSchemaConverter) val importType = destCatalog.streams.first().importType assert(importType is Dedupe) val dedupe = importType as Dedupe @@ -60,12 +61,9 @@ class SafeDestinationCatalogFactoryTest { fun `test checkCatalog returns test stream`() { val factory = SafeDestinationCatalogFactory() val namespaceMapper = mockk(relaxed = true) - - val destCatalog = factory.checkCatalog( - namespaceMapper, - "custom_check_ns" - ) - + + val destCatalog = factory.checkCatalog(namespaceMapper, "custom_check_ns") + assertEquals(1, destCatalog.streams.size) assertEquals("custom_check_ns", destCatalog.streams.first().unmappedNamespace) assert(destCatalog.streams.first().unmappedName.startsWith("test")) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt 
b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt index 8120753a38cc..43a201a79829 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/kotlin/io/airbyte/integrations/destination/bigquery/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperationsTest.kt @@ -30,17 +30,16 @@ import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration import io.airbyte.integrations.destination.bigquery.spec.BigqueryRegion import io.airbyte.integrations.destination.bigquery.spec.CdcDeletionMode import io.airbyte.integrations.destination.bigquery.spec.PartitioningGranularity +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations.Companion.clusteringMatches import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations.Companion.partitioningMatches -import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadSqlGenerator.Companion.toDialectType import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test import org.mockito.Mockito import org.mockito.Mockito.RETURNS_DEEP_STUBS import org.mockito.kotlin.any -import org.mockito.kotlin.doReturn import org.mockito.kotlin.mock import org.mockito.kotlin.whenever @@ -209,12 +208,16 @@ class BigqueryDirectLoadNativeTableOperationsTest { // Clustering is null val existingTable = Mockito.mock(StandardTableDefinition::class.java) Mockito.`when`(existingTable.clustering).thenReturn(null) - Assertions.assertFalse(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertFalse( + clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) // Clustering does not contain all fields Mockito.`when`(existingTable.clustering) .thenReturn(Clustering.newBuilder().setFields(listOf("_airbyte_extracted_at")).build()) - Assertions.assertFalse(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertFalse( + clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) // Clustering matches stream = @@ -228,7 +231,9 @@ class BigqueryDirectLoadNativeTableOperationsTest { syncId = 0, namespaceMapper = NamespaceMapper() ) - Assertions.assertTrue(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertTrue( + clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) // Clustering only the first 3 PK columns (See // https://github.com/airbytehq/oncall/issues/2565) @@ -270,7 +275,9 @@ class BigqueryDirectLoadNativeTableOperationsTest { "e1" to "e2", ) ) - Assertions.assertTrue(clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertTrue( + 
clusteringMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) } @Test @@ -309,13 +316,17 @@ class BigqueryDirectLoadNativeTableOperationsTest { // Partitioning is null Mockito.`when`(existingTable.timePartitioning).thenReturn(null) - Assertions.assertFalse(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertFalse( + partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) // incorrect field Mockito.`when`(existingTable.timePartitioning) .thenReturn( TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("_foo").build() ) - Assertions.assertFalse(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertFalse( + partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) // incorrect partitioning scheme Mockito.`when`(existingTable.timePartitioning) .thenReturn( @@ -323,7 +334,9 @@ class BigqueryDirectLoadNativeTableOperationsTest { .setField("_airbyte_extracted_at") .build() ) - Assertions.assertFalse(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertFalse( + partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) // partitioning matches Mockito.`when`(existingTable.timePartitioning) @@ -332,6 +345,8 @@ class BigqueryDirectLoadNativeTableOperationsTest { .setField("_airbyte_extracted_at") .build() ) - Assertions.assertTrue(partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider)) + Assertions.assertTrue( + partitioningMatches(stream, columnNameMapping, existingTable, streamConfigProvider) + ) } } From da3b59d27303dd8fd3d8b8ba223aa3e536467538 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Sat, 7 Feb 2026 13:35:49 +0530 Subject: [PATCH 14/16] Apply formatting and update spec fixtures --- .../bigquery/BigqueryBeansFactory.kt | 26 +++---- .../bigquery/SafeDestinationCatalogFactory.kt | 5 +- .../bigquery/check/BigqueryCheckCleaner.kt | 2 +- .../bigquery/spec/BigqueryConfiguration.kt | 21 +++--- .../bigquery/spec/BigquerySpecification.kt | 24 +++---- .../bigquery/stream/StreamConfigProvider.kt | 68 ++++++++---------- ...BigqueryDirectLoadNativeTableOperations.kt | 37 ++++++---- .../BigqueryDirectLoadSqlGenerator.kt | 22 +++--- .../bigquery/BigqueryDataDumper.kt | 10 ++- .../resources/expected-spec-cloud.json | 70 +++++++++++++++++++ .../resources/expected-spec-oss.json | 70 +++++++++++++++++++ 11 files changed, 250 insertions(+), 105 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt index b94e457fb355..01d214508f60 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryBeansFactory.kt @@ -11,7 +11,10 @@ import com.google.cloud.bigquery.BigQueryOptions import io.airbyte.cdk.load.check.DestinationCheckerSync import io.airbyte.cdk.load.command.DestinationCatalog import io.airbyte.cdk.load.command.DestinationConfiguration +import io.airbyte.cdk.load.orchestration.db.ColumnNameGenerator import 
io.airbyte.cdk.load.orchestration.db.DefaultTempTableNameGenerator +import io.airbyte.cdk.load.orchestration.db.FinalTableNameGenerator +import io.airbyte.cdk.load.orchestration.db.RawTableNameGenerator import io.airbyte.cdk.load.orchestration.db.direct_load_table.DefaultDirectLoadTableSqlOperations import io.airbyte.cdk.load.orchestration.db.direct_load_table.DirectLoadTableExecutionConfig import io.airbyte.cdk.load.orchestration.db.direct_load_table.DirectLoadTableWriter @@ -27,24 +30,21 @@ import io.airbyte.cdk.load.write.StreamStateStore import io.airbyte.cdk.load.write.WriteOperation import io.airbyte.integrations.destination.bigquery.check.BigqueryCheckCleaner import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.bulk_loader.BigQueryBulkOneShotUploader import io.airbyte.integrations.destination.bigquery.write.bulk_loader.BigQueryBulkOneShotUploaderStep import io.airbyte.integrations.destination.bigquery.write.bulk_loader.BigqueryBulkLoadConfiguration import io.airbyte.integrations.destination.bigquery.write.bulk_loader.BigqueryConfiguredForBulkLoad import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigQueryDatabaseHandler +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryColumnNameGenerator +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryFinalTableNameGenerator +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryRawTableNameGenerator import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadDatabaseInitialStatusGatherer import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadNativeTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadSqlGenerator import io.airbyte.integrations.destination.bigquery.write.typing_deduping.direct_load_tables.BigqueryDirectLoadSqlTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.legacy_raw_tables.BigqueryRawTableOperations import io.airbyte.integrations.destination.bigquery.write.typing_deduping.legacy_raw_tables.BigqueryTypingDedupingDatabaseInitialStatusGatherer -import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider -import io.airbyte.cdk.load.orchestration.db.RawTableNameGenerator -import io.airbyte.cdk.load.orchestration.db.FinalTableNameGenerator -import io.airbyte.cdk.load.orchestration.db.ColumnNameGenerator -import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryRawTableNameGenerator -import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryFinalTableNameGenerator -import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryColumnNameGenerator import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Requires @@ -136,7 +136,8 @@ class BigqueryBeansFactory { names: TableCatalog, ): DestinationWriter { val destinationHandler = BigQueryDatabaseHandler(bigquery, config.datasetLocation.region) - // We need to pass the generators to the TableCatalog manually since we are constructing it here? 
+ // We need to pass the generators to the TableCatalog manually since we are constructing it + // here? // Actually, TableCatalog is usually injected. But wait, where is TableCatalog defined? // It's usually created by the factory too. // Let's check if we need to update TableCatalog creation. @@ -145,15 +146,15 @@ class BigqueryBeansFactory { // So we need to ensure TableCatalog uses our new generators. // Looking at existing TableCatalog in CDK, it uses @Named("rawTableNameGenerator") etc. // So defining the beans with those names matches the expectation. - + // Wait, the previous getWriter signature had `names: TableCatalog`. // Let's keep that, but ensure we define the generator beans so TableCatalog can find them. - + if (config.legacyRawTablesOnly) { // force smart cast @Suppress("UNCHECKED_CAST") streamStateStore as StreamStateStore - + return TypingDedupingWriter( names, BigqueryTypingDedupingDatabaseInitialStatusGatherer(bigquery), @@ -182,7 +183,8 @@ class BigqueryBeansFactory { // force smart cast @Suppress("UNCHECKED_CAST") streamStateStore as StreamStateStore - val tempTableNameGenerator = DefaultTempTableNameGenerator(internalNamespace = config.internalTableDataset) + val tempTableNameGenerator = + DefaultTempTableNameGenerator(internalNamespace = config.internalTableDataset) return DirectLoadTableWriter( internalNamespace = config.internalTableDataset, diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt index 91277665e6c0..5d75c3967b56 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/SafeDestinationCatalogFactory.kt @@ -25,7 +25,6 @@ import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Primary import io.micronaut.context.annotation.Replaces import io.micronaut.context.annotation.Requires -import io.micronaut.context.annotation.Value import jakarta.inject.Named import jakarta.inject.Singleton import java.time.LocalDate @@ -93,9 +92,7 @@ class SafeDestinationCatalogFactory { unmappedName = "test$date$random", importType = Append, schema = - ObjectType( - linkedMapOf("test" to FieldType(IntegerType, nullable = true)) - ), + ObjectType(linkedMapOf("test" to FieldType(IntegerType, nullable = true))), generationId = 1, minimumGenerationId = 0, syncId = 1, diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt index 6d8c4452a6f5..fdae2d4f15b1 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/check/BigqueryCheckCleaner.kt @@ -8,9 +8,9 @@ import io.airbyte.cdk.load.check.CheckCleaner import io.airbyte.cdk.load.command.DestinationStream import 
io.airbyte.integrations.destination.bigquery.BigqueryBeansFactory import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration +import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryFinalTableNameGenerator import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigqueryRawTableNameGenerator -import io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider import io.airbyte.integrations.destination.bigquery.write.typing_deduping.toTableId class BigqueryCheckCleaner : CheckCleaner { diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt index b9afb35750a5..41251b4e9f45 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigqueryConfiguration.kt @@ -83,15 +83,18 @@ class BigqueryConfigurationFactory : defaultClusteringField = pojo.defaultClusteringField, defaultTableSuffix = pojo.defaultTableSuffix, defaultPartitioningGranularity = pojo.defaultPartitioningGranularity, - streamConfigMap = pojo.streams?.associate { - it.name to StreamLevelConfig( - partitioningField = it.partitioningField, - partitioningGranularity = it.partitioningGranularity, - clusteringField = it.clusteringField, - tableSuffix = it.tableSuffix, - dataset = it.dataset - ) - } ?: emptyMap(), + streamConfigMap = + pojo.streams?.associate { + it.name to + StreamLevelConfig( + partitioningField = it.partitioningField, + partitioningGranularity = it.partitioningGranularity, + clusteringField = it.clusteringField, + tableSuffix = it.tableSuffix, + dataset = it.dataset + ) + } + ?: emptyMap(), ) } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt index 55b5161b9cb3..f5af908afc4b 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/spec/BigquerySpecification.kt @@ -107,13 +107,17 @@ class BigquerySpecification : ConfigurationSpecification() { val internalTableDataset: String? = null @get:JsonSchemaTitle("Default Partitioning Field") - @get:JsonPropertyDescription("Default field to use for partitioning (e.g. _airbyte_extracted_at)") + @get:JsonPropertyDescription( + "Default field to use for partitioning (e.g. _airbyte_extracted_at)" + ) @get:JsonProperty("default_partitioning_field") @get:JsonSchemaInject(json = """{"group": "advanced", "order": 10}""") val defaultPartitioningField: String? = null @get:JsonSchemaTitle("Default Partitioning Granularity") - @get:JsonPropertyDescription("Default partitioning granularity: DAY, MONTH, or YEAR. 
Defaults to DAY.") + @get:JsonPropertyDescription( + "Default partitioning granularity: DAY, MONTH, or YEAR. Defaults to DAY." + ) @get:JsonProperty("default_partitioning_granularity") @get:JsonSchemaInject(json = """{"group": "advanced", "order": 11}""") val defaultPartitioningGranularity: PartitioningGranularity? = null @@ -139,36 +143,28 @@ class BigquerySpecification : ConfigurationSpecification() { val streams: List? = null } -/** - * Per-stream configuration for custom partitioning, clustering, and table naming. - */ +/** Per-stream configuration for custom partitioning, clustering, and table naming. */ data class SingleStreamConfiguration( @get:JsonSchemaTitle("Stream Name") @get:JsonPropertyDescription("Name of the stream (or namespace.stream_name)") @JsonProperty("name") val name: String = "", - @get:JsonSchemaTitle("Partitioning Field") @JsonProperty("partitioning_field") val partitioningField: String? = null, - @get:JsonSchemaTitle("Partitioning Granularity") @get:JsonPropertyDescription( - "Partitioning granularity for the partitioning field. Allowed values: DAY, MONTH, YEAR. Defaults to DAY.") + "Partitioning granularity for the partitioning field. Allowed values: DAY, MONTH, YEAR. Defaults to DAY." + ) @JsonProperty("partitioning_granularity") val partitioningGranularity: PartitioningGranularity? = null, - @get:JsonSchemaTitle("Clustering Field") @JsonProperty("clustering_field") val clusteringField: String? = null, - @get:JsonSchemaTitle("Table Suffix") @JsonProperty("table_suffix") val tableSuffix: String? = null, - - @get:JsonSchemaTitle("Target Dataset") - @JsonProperty("dataset") - val dataset: String? = null, + @get:JsonSchemaTitle("Target Dataset") @JsonProperty("dataset") val dataset: String? = null, ) @JsonTypeInfo( diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt index a7d85c0ba364..1eb30635d846 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/stream/StreamConfigProvider.kt @@ -6,82 +6,76 @@ package io.airbyte.integrations.destination.bigquery.stream import io.airbyte.cdk.load.command.DestinationStream import io.airbyte.integrations.destination.bigquery.spec.BigqueryConfiguration -import io.airbyte.integrations.destination.bigquery.spec.StreamLevelConfig import io.airbyte.integrations.destination.bigquery.spec.PartitioningGranularity -import jakarta.inject.Singleton +import io.airbyte.integrations.destination.bigquery.spec.StreamLevelConfig /** * Provides stream-level configuration by looking up the config map with fallback to defaults. - * + * * Stream lookup order: * 1. namespace.stream_name (if namespace is defined) * 2. stream_name only * 3. Falls back to connector-level defaults */ -class StreamConfigProvider( - private val config: BigqueryConfiguration -) { +class StreamConfigProvider(private val config: BigqueryConfiguration) { /** - * Get the stream-level configuration for a given stream descriptor. - * Returns null if no stream-specific config exists. + * Get the stream-level configuration for a given stream descriptor. Returns null if no + * stream-specific config exists. 
*/ fun getStreamConfig(descriptor: DestinationStream.Descriptor): StreamLevelConfig? { // Try namespace.stream_name first if (descriptor.namespace != null) { val fullKey = "${descriptor.namespace}.${descriptor.name}" - config.streamConfigMap[fullKey]?.let { return it } + config.streamConfigMap[fullKey]?.let { + return it + } } - + // Try stream_name only return config.streamConfigMap[descriptor.name] } - + /** - * Get the effective clustering field for a stream. - * Priority: stream config > default config > null (use PK-based) + * Get the effective clustering field for a stream. Priority: stream config > default config > + * null (use PK-based) */ fun getClusteringField(descriptor: DestinationStream.Descriptor): String? { - return getStreamConfig(descriptor)?.clusteringField - ?: config.defaultClusteringField + return getStreamConfig(descriptor)?.clusteringField ?: config.defaultClusteringField } - + /** - * Get the effective partitioning field for a stream. - * Priority: stream config > default config > "_airbyte_extracted_at" + * Get the effective partitioning field for a stream. Priority: stream config > default config > + * "_airbyte_extracted_at" */ fun getPartitioningField(descriptor: DestinationStream.Descriptor): String { return getStreamConfig(descriptor)?.partitioningField - ?: config.defaultPartitioningField - ?: "_airbyte_extracted_at" + ?: config.defaultPartitioningField ?: "_airbyte_extracted_at" } /** - * Get the effective partitioning granularity for a stream. - * Priority: stream config > default config > DAY + * Get the effective partitioning granularity for a stream. Priority: stream config > default + * config > DAY */ - fun getPartitioningGranularity(descriptor: DestinationStream.Descriptor): PartitioningGranularity { + fun getPartitioningGranularity( + descriptor: DestinationStream.Descriptor + ): PartitioningGranularity { return getStreamConfig(descriptor)?.partitioningGranularity - ?: config.defaultPartitioningGranularity - ?: PartitioningGranularity.DAY + ?: config.defaultPartitioningGranularity ?: PartitioningGranularity.DAY } - + /** - * Get the effective table suffix for a stream. - * Priority: stream config > default config > "" (no suffix) + * Get the effective table suffix for a stream. Priority: stream config > default config > "" + * (no suffix) */ fun getTableSuffix(descriptor: DestinationStream.Descriptor): String { - return getStreamConfig(descriptor)?.tableSuffix - ?: config.defaultTableSuffix - ?: "" + return getStreamConfig(descriptor)?.tableSuffix ?: config.defaultTableSuffix ?: "" } - + /** - * Get the effective dataset for a stream. - * Priority: stream config > stream namespace > default dataset + * Get the effective dataset for a stream. 
Priority: stream config > stream namespace > default + * dataset */ fun getDataset(descriptor: DestinationStream.Descriptor): String { - return getStreamConfig(descriptor)?.dataset - ?: descriptor.namespace - ?: config.datasetId + return getStreamConfig(descriptor)?.dataset ?: descriptor.namespace ?: config.datasetId } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt index eb1bf32b454e..4728631dab6a 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadNativeTableOperations.kt @@ -26,8 +26,8 @@ import io.airbyte.cdk.load.orchestration.db.direct_load_table.DirectLoadTableNat import io.airbyte.cdk.util.CollectionUtils.containsAllIgnoreCase import io.airbyte.cdk.util.containsIgnoreCase import io.airbyte.cdk.util.findIgnoreCase -import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigQueryDatabaseHandler import io.airbyte.integrations.destination.bigquery.spec.PartitioningGranularity +import io.airbyte.integrations.destination.bigquery.write.typing_deduping.BigQueryDatabaseHandler import io.airbyte.integrations.destination.bigquery.write.typing_deduping.toTableId import io.github.oshai.kotlinlogging.KotlinLogging import kotlinx.coroutines.runBlocking @@ -42,7 +42,8 @@ class BigqueryDirectLoadNativeTableOperations( private val databaseHandler: BigQueryDatabaseHandler, private val projectId: String, private val tempTableNameGenerator: TempTableNameGenerator, - private val streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + private val streamConfigProvider: + io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ) : DirectLoadTableNativeOperations { override suspend fun ensureSchemaMatches( stream: DestinationStream, @@ -401,7 +402,8 @@ class BigqueryDirectLoadNativeTableOperations( stream: DestinationStream, columnNameMapping: ColumnNameMapping, existingTable: StandardTableDefinition, - streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + streamConfigProvider: + io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ): Boolean { // We always want to set a clustering config, so if the table doesn't have one, // then we should fix it. 
@@ -413,10 +415,12 @@ class BigqueryDirectLoadNativeTableOperations( // Calculate expected clustering columns using the provider val expectedClusteringColumns = mutableListOf() - val customClusteringField = streamConfigProvider.getClusteringField(stream.unmappedDescriptor) + val customClusteringField = + streamConfigProvider.getClusteringField(stream.unmappedDescriptor) if (customClusteringField != null) { - val clusterFields = customClusteringField.split(",").map{ it.trim() }.filter { it.isNotEmpty() } + val clusterFields = + customClusteringField.split(",").map { it.trim() }.filter { it.isNotEmpty() } for (field in clusterFields) { val actualColumnName = columnNameMapping[field] @@ -426,12 +430,17 @@ class BigqueryDirectLoadNativeTableOperations( } if (expectedClusteringColumns.isEmpty()) { - // Fallback to default if no columns found (shouldn't happen if validation passes elsewhere) - expectedClusteringColumns.addAll(BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping)) + // Fallback to default if no columns found (shouldn't happen if validation + // passes elsewhere) + expectedClusteringColumns.addAll( + BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping) + ) } } else { // No custom field, use default logic - expectedClusteringColumns.addAll(BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping)) + expectedClusteringColumns.addAll( + BigqueryDirectLoadSqlGenerator.clusteringColumns(stream, columnNameMapping) + ) } // We're OK with a column being in the clustering config that we don't expect @@ -460,7 +469,8 @@ class BigqueryDirectLoadNativeTableOperations( stream: DestinationStream, columnNameMapping: ColumnNameMapping, existingTable: StandardTableDefinition, - streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + streamConfigProvider: + io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ): Boolean { val (expectedPartitionField, expectedPartitionType) = resolvePartitioningField(stream, columnNameMapping, streamConfigProvider) @@ -474,10 +484,13 @@ class BigqueryDirectLoadNativeTableOperations( private fun resolvePartitioningField( stream: DestinationStream, columnNameMapping: ColumnNameMapping, - streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + streamConfigProvider: + io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ): Pair { - val requestedField = streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) - val granularity = streamConfigProvider.getPartitioningGranularity(stream.unmappedDescriptor) + val requestedField = + streamConfigProvider.getPartitioningField(stream.unmappedDescriptor) + val granularity = + streamConfigProvider.getPartitioningGranularity(stream.unmappedDescriptor) val expectedPartitionType = when (granularity) { PartitioningGranularity.DAY -> TimePartitioning.Type.DAY diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt index 8d718d854c8d..0098931f925e 100644 --- 
a/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/kotlin/io/airbyte/integrations/destination/bigquery/write/typing_deduping/direct_load_tables/BigqueryDirectLoadSqlGenerator.kt @@ -38,7 +38,8 @@ import org.apache.commons.lang3.StringUtils class BigqueryDirectLoadSqlGenerator( private val projectId: String?, private val cdcDeletionMode: CdcDeletionMode, - private val streamConfigProvider: io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, + private val streamConfigProvider: + io.airbyte.integrations.destination.bigquery.stream.StreamConfigProvider, ) : DirectLoadSqlGenerator { override fun createTable( stream: DestinationStream, @@ -60,7 +61,7 @@ class BigqueryDirectLoadSqlGenerator( .joinToString(",\n") val columnDeclarations = columnsAndTypes(stream, columnNameMapping) - + // Use custom clustering if provided, else use PK-based clustering val clusterConfig = getClusteringColumns(stream, columnNameMapping) @@ -70,7 +71,7 @@ class BigqueryDirectLoadSqlGenerator( val partitioningExpression = resolvePartitioningExpression(stream, columnNameMapping) val finalTableId = tableName.toPrettyString(QUOTE) - + // bigquery has a CREATE OR REPLACE TABLE statement, but we can't use it // because you can't change a partitioning/clustering scheme in-place. // Bigquery requires you to drop+recreate the table in this case. @@ -356,8 +357,7 @@ class BigqueryDirectLoadSqlGenerator( } /** - * Get clustering columns with stream-level override support. - * Priority: + * Get clustering columns with stream-level override support. Priority: * 1. Stream-level clustering field from config * 2. Default clustering field from connector config * 3. 
PK-based clustering (for dedupe mode) @@ -366,11 +366,13 @@ class BigqueryDirectLoadSqlGenerator( stream: DestinationStream, columnNameMapping: ColumnNameMapping ): List { - val customClusteringField = streamConfigProvider.getClusteringField(stream.unmappedDescriptor) + val customClusteringField = + streamConfigProvider.getClusteringField(stream.unmappedDescriptor) if (customClusteringField != null) { // Validate that the clustering field exists and is a valid type - val clusterFields = customClusteringField.split(",").map{ it.trim() }.filter { it.isNotEmpty() } + val clusterFields = + customClusteringField.split(",").map { it.trim() }.filter { it.isNotEmpty() } val resolvedColumns = mutableListOf() for (field in clusterFields) { @@ -378,12 +380,12 @@ class BigqueryDirectLoadSqlGenerator( if (actualColumnName != null) { val fieldType = stream.schema.asColumns()[field] if (fieldType != null) { - val bigqueryType = toDialectType(fieldType.type) - if (bigqueryType == StandardSQLTypeName.JSON) { + val bigqueryType = toDialectType(fieldType.type) + if (bigqueryType == StandardSQLTypeName.JSON) { throw ConfigErrorException( "Stream ${stream.mappedDescriptor.toPrettyString()}: Clustering field '$field' is JSON type, which cannot be used for clustering" ) - } + } } resolvedColumns.add(actualColumnName) } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt index 0aaf712de0da..8406b046e558 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/kotlin/io/airbyte/integrations/destination/bigquery/BigqueryDataDumper.kt @@ -51,9 +51,8 @@ object BigqueryRawTableDataDumper : DestinationDataDumper { val streamConfigProvider = StreamConfigProvider(config) val (_, rawTableName) = - BigqueryRawTableNameGenerator(config, streamConfigProvider).getTableName( - stream.mappedDescriptor - ) + BigqueryRawTableNameGenerator(config, streamConfigProvider) + .getTableName(stream.mappedDescriptor) return bigquery.getTable(TableId.of(config.internalTableDataset, rawTableName))?.let { table -> @@ -101,9 +100,8 @@ object BigqueryFinalTableDataDumper : DestinationDataDumper { val streamConfigProvider = StreamConfigProvider(config) val (datasetName, finalTableName) = - BigqueryFinalTableNameGenerator(config, streamConfigProvider).getTableName( - stream.mappedDescriptor - ) + BigqueryFinalTableNameGenerator(config, streamConfigProvider) + .getTableName(stream.mappedDescriptor) return bigquery.getTable(TableId.of(datasetName, finalTableName))?.let { table -> val bigquerySchema = table.getDefinition().schema!! 
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-cloud.json b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-cloud.json index aae17d3359df..505d690bd17e 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-cloud.json +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-cloud.json @@ -153,6 +153,76 @@ "title" : "Airbyte Internal Table Dataset Name", "group" : "advanced", "order" : 8 + }, + "default_partitioning_field" : { + "type" : "string", + "description" : "Default field to use for partitioning (e.g. _airbyte_extracted_at)", + "title" : "Default Partitioning Field", + "group" : "advanced", + "order" : 10 + }, + "default_partitioning_granularity" : { + "type" : "string", + "enum" : [ "DAY", "MONTH", "YEAR" ], + "description" : "Default partitioning granularity: DAY, MONTH, or YEAR. Defaults to DAY.", + "title" : "Default Partitioning Granularity", + "group" : "advanced", + "order" : 11 + }, + "default_clustering_field" : { + "type" : "string", + "description" : "Default field to use for clustering (e.g. _airbyte_extracted_at)", + "title" : "Default Clustering Field", + "group" : "advanced", + "order" : 12 + }, + "default_table_suffix" : { + "type" : "string", + "description" : "Default suffix to append to table names", + "title" : "Default Table Suffix", + "group" : "advanced", + "order" : 13 + }, + "streams" : { + "type" : "array", + "items" : { + "type" : "object", + "additionalProperties" : true, + "properties" : { + "name" : { + "type" : "string", + "description" : "Name of the stream (or namespace.stream_name)", + "title" : "Stream Name" + }, + "partitioning_field" : { + "type" : "string", + "title" : "Partitioning Field" + }, + "partitioning_granularity" : { + "type" : "string", + "enum" : [ "DAY", "MONTH", "YEAR" ], + "description" : "Partitioning granularity for the partitioning field. Allowed values: DAY, MONTH, YEAR. Defaults to DAY.", + "title" : "Partitioning Granularity" + }, + "clustering_field" : { + "type" : "string", + "title" : "Clustering Field" + }, + "table_suffix" : { + "type" : "string", + "title" : "Table Suffix" + }, + "dataset" : { + "type" : "string", + "title" : "Target Dataset" + } + }, + "required" : [ "name" ] + }, + "description" : "Per-stream configuration overrides.", + "title" : "Stream Configuration", + "group" : "advanced", + "order" : 14 } }, "required" : [ "project_id", "dataset_location", "dataset_id" ], diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-oss.json b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-oss.json index aae17d3359df..505d690bd17e 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-oss.json +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/resources/expected-spec-oss.json @@ -153,6 +153,76 @@ "title" : "Airbyte Internal Table Dataset Name", "group" : "advanced", "order" : 8 + }, + "default_partitioning_field" : { + "type" : "string", + "description" : "Default field to use for partitioning (e.g. 
_airbyte_extracted_at)", + "title" : "Default Partitioning Field", + "group" : "advanced", + "order" : 10 + }, + "default_partitioning_granularity" : { + "type" : "string", + "enum" : [ "DAY", "MONTH", "YEAR" ], + "description" : "Default partitioning granularity: DAY, MONTH, or YEAR. Defaults to DAY.", + "title" : "Default Partitioning Granularity", + "group" : "advanced", + "order" : 11 + }, + "default_clustering_field" : { + "type" : "string", + "description" : "Default field to use for clustering (e.g. _airbyte_extracted_at)", + "title" : "Default Clustering Field", + "group" : "advanced", + "order" : 12 + }, + "default_table_suffix" : { + "type" : "string", + "description" : "Default suffix to append to table names", + "title" : "Default Table Suffix", + "group" : "advanced", + "order" : 13 + }, + "streams" : { + "type" : "array", + "items" : { + "type" : "object", + "additionalProperties" : true, + "properties" : { + "name" : { + "type" : "string", + "description" : "Name of the stream (or namespace.stream_name)", + "title" : "Stream Name" + }, + "partitioning_field" : { + "type" : "string", + "title" : "Partitioning Field" + }, + "partitioning_granularity" : { + "type" : "string", + "enum" : [ "DAY", "MONTH", "YEAR" ], + "description" : "Partitioning granularity for the partitioning field. Allowed values: DAY, MONTH, YEAR. Defaults to DAY.", + "title" : "Partitioning Granularity" + }, + "clustering_field" : { + "type" : "string", + "title" : "Clustering Field" + }, + "table_suffix" : { + "type" : "string", + "title" : "Table Suffix" + }, + "dataset" : { + "type" : "string", + "title" : "Target Dataset" + } + }, + "required" : [ "name" ] + }, + "description" : "Per-stream configuration overrides.", + "title" : "Stream Configuration", + "group" : "advanced", + "order" : 14 } }, "required" : [ "project_id", "dataset_location", "dataset_id" ], From fb35f93cd205171267618da068654d106019a34d Mon Sep 17 00:00:00 2001 From: Pragyash Date: Sat, 7 Feb 2026 22:32:12 +0530 Subject: [PATCH 15/16] Skip no-creds integration tests --- .../destination-bigquery/poe_tasks.toml | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml b/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml index d53e9a2f08bf..01f4d309e5f4 100644 --- a/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml +++ b/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml @@ -1,3 +1,27 @@ include = [ "${POE_GIT_DIR}/poe-tasks/gradle-connector-tasks.toml", ] + +[tasks.test-integration-tests] +shell = ''' +set -eu # Ensure we return non-zero exit code upon failure + +if [ ! -d src/test-integration ]; then + echo "No integration tests defined; skipping integration tests." + exit 0 +fi + +set -- \ + secrets/credentials-1s1t-standard.json \ + secrets/credentials-1s1t-gcs.json \ + secrets/credentials-badproject.json +for secret_file in "$@"; do + if [ ! -f "$secret_file" ]; then + echo "Missing required integration secret '$secret_file'; skipping integration tests." + exit 0 + fi +done + +echo "Found 'src/test-integration' directory and required secrets, running integration tests..." +gradle integrationTestJava +''' From 97ad6b4fef18ac1be3fdf5e66457f5adc0e99ba2 Mon Sep 17 00:00:00 2001 From: Pragyash Date: Sun, 8 Feb 2026 08:57:14 +0530 Subject: [PATCH 16/16] Revert "Skip no-creds integration tests" This reverts commit fb35f93cd205171267618da068654d106019a34d. 
--- .../destination-bigquery/poe_tasks.toml | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml b/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml index 01f4d309e5f4..d53e9a2f08bf 100644 --- a/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml +++ b/airbyte-integrations/connectors/destination-bigquery/poe_tasks.toml @@ -1,27 +1,3 @@ include = [ "${POE_GIT_DIR}/poe-tasks/gradle-connector-tasks.toml", ] - -[tasks.test-integration-tests] -shell = ''' -set -eu # Ensure we return non-zero exit code upon failure - -if [ ! -d src/test-integration ]; then - echo "No integration tests defined; skipping integration tests." - exit 0 -fi - -set -- \ - secrets/credentials-1s1t-standard.json \ - secrets/credentials-1s1t-gcs.json \ - secrets/credentials-badproject.json -for secret_file in "$@"; do - if [ ! -f "$secret_file" ]; then - echo "Missing required integration secret '$secret_file'; skipping integration tests." - exit 0 - fi -done - -echo "Found 'src/test-integration' directory and required secrets, running integration tests..." -gradle integrationTestJava -'''